
Commit 56fca89

Auto merge of #7146 - alexcrichton:faster-proc-macro, r=Eh2406
Optimize runtime of `#[cargo_test_macro]`

I've noticed recently that the incremental compile time for our test suite has felt like it's increased quite a bit. I think one reason is that everything has to go through `#[cargo_test_macro]` unconditionally on every incremental build, and we have a lot of tests being pumped through that macro. Instrumenting the macro a little bit shows that we spend nearly 2.5 seconds on each compilation simply executing it (note that this is in debug mode rather than release, since we typically don't execute tests in release mode).

This commit drops the usage of `syn` and `quote` in favor of a "raw" procedural macro built directly on the `proc_macro` API, which is much better suited to our narrow use case. This drops the collective time spent in the macro to 0.2 seconds, even in debug mode!
Parents: 2b21fa6 + 8887b67 · Commit: 56fca89
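For context, the attribute's job is unchanged by this commit: it rewrites each annotated test into a plain `#[test]` whose body first acquires the per-test directory guard. Roughly, assuming a made-up test name and body for illustration:

// What a test author writes:
#[cargo_test]
fn some_test() {
    // ... test body ...
}

// Approximately what the attribute expands it to:
#[test]
fn some_test() {
    let _test_guard = crate::support::paths::init_root();
    // ... test body ...
}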

File tree

2 files changed: +51 -22 lines

crates/cargo-test-macro/Cargo.toml (-4 lines)

@@ -11,7 +11,3 @@ description = "Helper proc-macro for Cargo's testsuite."
 
 [lib]
 proc-macro = true
-
-[dependencies]
-quote = "0.6"
-syn = { version = "0.15", features = ["full"] }

crates/cargo-test-macro/src/lib.rs (+51 -18 lines)

@@ -1,26 +1,59 @@
 extern crate proc_macro;
 
-use quote::{quote, ToTokens};
-use syn::{parse::Parser, *};
+use proc_macro::*;
 
 #[proc_macro_attribute]
-pub fn cargo_test(
-    _attr: proc_macro::TokenStream,
-    item: proc_macro::TokenStream,
-) -> proc_macro::TokenStream {
-    let mut fn_def = parse_macro_input!(item as ItemFn);
+pub fn cargo_test(_attr: TokenStream, item: TokenStream) -> TokenStream {
+    let span = Span::call_site();
+    let mut ret = TokenStream::new();
+    ret.extend(Some(TokenTree::from(Punct::new('#', Spacing::Alone))));
+    let test = TokenTree::from(Ident::new("test", span));
+    ret.extend(Some(TokenTree::from(Group::new(
+        Delimiter::Bracket,
+        test.into(),
+    ))));
 
-    let attr = quote! {
-        #[test]
-    };
-    fn_def
-        .attrs
-        .extend(Attribute::parse_outer.parse2(attr).unwrap());
+    for token in item {
+        let group = match token {
+            TokenTree::Group(g) => {
+                if g.delimiter() == Delimiter::Brace {
+                    g
+                } else {
+                    ret.extend(Some(TokenTree::Group(g)));
+                    continue;
+                }
+            }
+            other => {
+                ret.extend(Some(other));
+                continue;
+            }
+        };
 
-    let stmt = quote! {
-        let _test_guard = crate::support::paths::init_root();
-    };
-    fn_def.block.stmts.insert(0, parse2(stmt).unwrap());
+        let mut new_body = vec![
+            TokenTree::from(Ident::new("let", span)),
+            TokenTree::from(Ident::new("_test_guard", span)),
+            TokenTree::from(Punct::new('=', Spacing::Alone)),
+            TokenTree::from(Ident::new("crate", span)),
+            TokenTree::from(Punct::new(':', Spacing::Joint)),
+            TokenTree::from(Punct::new(':', Spacing::Alone)),
+            TokenTree::from(Ident::new("support", span)),
+            TokenTree::from(Punct::new(':', Spacing::Joint)),
+            TokenTree::from(Punct::new(':', Spacing::Alone)),
+            TokenTree::from(Ident::new("paths", span)),
+            TokenTree::from(Punct::new(':', Spacing::Joint)),
+            TokenTree::from(Punct::new(':', Spacing::Alone)),
+            TokenTree::from(Ident::new("init_root", span)),
+            TokenTree::from(Group::new(Delimiter::Parenthesis, TokenStream::new())),
+            TokenTree::from(Punct::new(';', Spacing::Alone)),
+        ]
+        .into_iter()
+        .collect::<TokenStream>();
+        new_body.extend(group.stream());
+        ret.extend(Some(TokenTree::from(Group::new(
+            group.delimiter(),
+            new_body,
+        ))));
+    }
 
-    fn_def.into_token_stream().into()
+    return ret;
 }
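One detail worth calling out in the new token-building code: without `quote!`, a path separator `::` has to be emitted as two `Punct` tokens, the first with `Spacing::Joint` (meaning it glues onto the following punctuation character to form one operator) and the second with `Spacing::Alone`. The commit repeats that pattern three times back to back to spell out `crate::support::paths::init_root`. A minimal sketch of the same pattern, using a helper function invented here purely for illustration:

// Illustration only, not part of this commit: emit the tokens for `::`
// with the compiler-provided `proc_macro` API (usable inside a proc-macro crate).
fn path_sep(out: &mut proc_macro::TokenStream) {
    use proc_macro::{Punct, Spacing, TokenTree};
    out.extend(vec![
        // Joint: this ':' combines with the next punct into a single `::`.
        TokenTree::from(Punct::new(':', Spacing::Joint)),
        // Alone: this ':' terminates the operator.
        TokenTree::from(Punct::new(':', Spacing::Alone)),
    ]);
}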
