@@ -93,18 +93,16 @@ pub struct Parser<'a> {
     /// Use span from this token if you need an isolated span.
     pub token: Token,
     /// The current non-normalized token if it's different from `token`.
-    /// Preferable use is through the `unnormalized_token()` getter.
     /// Use span from this token if you need to concatenate it with some neighbouring spans.
-    pub unnormalized_token: Option<Token>,
+    unnormalized_token: Token,
     /// The previous normalized token.
     /// Use span from this token if you need an isolated span.
     prev_token: Token,
     /// The previous non-normalized token if it's different from `prev_token`.
-    /// Preferable use is through the `unnormalized_prev_token()` getter.
     /// Use span from this token if you need to concatenate it with some neighbouring spans.
-    unnormalized_prev_token: Option<Token>,
-    /// Equivalent to `unnormalized_prev_token().span`.
-    /// FIXME: Remove in favor of `(unnormalized_)prev_token().span`.
+    unnormalized_prev_token: Token,
+    /// Equivalent to `unnormalized_prev_token.span`.
+    /// FIXME: Remove in favor of `(unnormalized_)prev_token.span`.
     pub prev_span: Span,
     restrictions: Restrictions,
     /// Used to determine the path to externally loaded source files.
@@ -378,9 +376,9 @@ impl<'a> Parser<'a> {
         let mut parser = Parser {
             sess,
             token: Token::dummy(),
-            unnormalized_token: None,
+            unnormalized_token: Token::dummy(),
             prev_token: Token::dummy(),
-            unnormalized_prev_token: None,
+            unnormalized_prev_token: Token::dummy(),
             prev_span: DUMMY_SP,
             restrictions: Restrictions::empty(),
             recurse_into_file_modules,
@@ -422,14 +420,6 @@ impl<'a> Parser<'a> {
         parser
     }

-    fn unnormalized_token(&self) -> &Token {
-        self.unnormalized_token.as_ref().unwrap_or(&self.token)
-    }
-
-    fn unnormalized_prev_token(&self) -> &Token {
-        self.unnormalized_prev_token.as_ref().unwrap_or(&self.prev_token)
-    }
-
     fn next_tok(&mut self, fallback_span: Span) -> Token {
         let mut next = if self.desugar_doc_comments {
             self.token_cursor.next_desugared()
@@ -899,18 +889,17 @@ impl<'a> Parser<'a> {
     // Interpolated identifier (`$i: ident`) and lifetime (`$l: lifetime`)
     // tokens are replaced with usual identifier and lifetime tokens,
     // so the former are never encountered during normal parsing.
-    fn normalize_token(token: &Token) -> Option<Token> {
-        match &token.kind {
+    crate fn set_token(&mut self, token: Token) {
+        self.unnormalized_token = token;
+        self.token = match &self.unnormalized_token.kind {
             token::Interpolated(nt) => match **nt {
                 token::NtIdent(ident, is_raw) => {
-                    Some(Token::new(token::Ident(ident.name, is_raw), ident.span))
+                    Token::new(token::Ident(ident.name, is_raw), ident.span)
                 }
-                token::NtLifetime(ident) => {
-                    Some(Token::new(token::Lifetime(ident.name), ident.span))
-                }
-                _ => None,
+                token::NtLifetime(ident) => Token::new(token::Lifetime(ident.name), ident.span),
+                _ => self.unnormalized_token.clone(),
             },
-            _ => None,
+            _ => self.unnormalized_token.clone(),
         }
     }

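The comment above `set_token` is the key idea of this hunk: interpolated `$i:ident` and `$l:lifetime` fragments are flattened into plain identifier and lifetime tokens so the grammar never sees the `Interpolated` wrapper, while `unnormalized_token` keeps the original for span bookkeeping. As a reading aid only, here is a minimal, self-contained sketch of that normalization using simplified stand-in types (hypothetical `TokenKind`/`Nonterminal`/`Parser` definitions, not rustc's real ones):

```rust
// Simplified stand-in types for illustration only; rustc's real `Token`,
// `TokenKind`, and `Nonterminal` carry spans, interned symbols, and many
// more variants than shown here.
#[derive(Clone, Debug, PartialEq)]
enum TokenKind {
    Ident(String),
    Lifetime(String),
    Interpolated(Box<Nonterminal>),
    Other,
}

#[derive(Clone, Debug, PartialEq)]
#[allow(dead_code)]
enum Nonterminal {
    NtIdent(String),
    NtLifetime(String),
    NtExpr, // stands in for every other nonterminal kind, which stays wrapped
}

struct Parser {
    token: TokenKind,              // normalized view: what the grammar matches on
    unnormalized_token: TokenKind, // exactly what the token stream produced
}

impl Parser {
    // Mirrors the shape of the PR's `set_token`: store the raw token, then
    // derive the normalized one by unwrapping interpolated idents/lifetimes.
    fn set_token(&mut self, token: TokenKind) {
        self.unnormalized_token = token;
        self.token = match &self.unnormalized_token {
            TokenKind::Interpolated(nt) => match &**nt {
                Nonterminal::NtIdent(name) => TokenKind::Ident(name.clone()),
                Nonterminal::NtLifetime(name) => TokenKind::Lifetime(name.clone()),
                _ => self.unnormalized_token.clone(),
            },
            _ => self.unnormalized_token.clone(),
        };
    }
}

fn main() {
    let mut p = Parser { token: TokenKind::Other, unnormalized_token: TokenKind::Other };
    p.set_token(TokenKind::Interpolated(Box::new(Nonterminal::NtIdent("foo".into()))));
    assert_eq!(p.token, TokenKind::Ident("foo".into())); // grammar sees a plain ident
    assert!(matches!(p.unnormalized_token, TokenKind::Interpolated(_))); // raw form kept
}
```

With both fields always holding a `Token` (dummy until the first bump), the `Option` wrapping, the `unwrap_or` getters, and the `mem::replace` dance at the call sites become unnecessary, which is what the remaining hunks delete.
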
@@ -925,13 +914,11 @@ impl<'a> Parser<'a> {
         // Update the current and previous tokens.
         self.prev_token = self.token.take();
         self.unnormalized_prev_token = self.unnormalized_token.take();
-        self.token = self.next_tok(self.unnormalized_prev_token().span);
-        if let Some(normalized_token) = Self::normalize_token(&self.token) {
-            self.unnormalized_token = Some(mem::replace(&mut self.token, normalized_token));
-        }
+        let next_token = self.next_tok(self.unnormalized_prev_token.span);
+        self.set_token(next_token);

         // Update fields derived from the previous token.
-        self.prev_span = self.unnormalized_prev_token().span;
+        self.prev_span = self.unnormalized_prev_token.span;

         self.expected_tokens.clear();
     }
@@ -945,13 +932,10 @@ impl<'a> Parser<'a> {
         // Update the current and previous tokens.
         self.prev_token = self.token.take();
         self.unnormalized_prev_token = self.unnormalized_token.take();
-        self.token = Token::new(next, span);
-        if let Some(normalized_token) = Self::normalize_token(&self.token) {
-            self.unnormalized_token = Some(mem::replace(&mut self.token, normalized_token));
-        }
+        self.set_token(Token::new(next, span));

         // Update fields derived from the previous token.
-        self.prev_span = self.unnormalized_prev_token().span.with_hi(span.lo());
+        self.prev_span = self.unnormalized_prev_token.span.with_hi(span.lo());

         self.expected_tokens.clear();
     }
@@ -1096,8 +1080,7 @@ impl<'a> Parser<'a> {
             &mut self.token_cursor.frame,
             self.token_cursor.stack.pop().unwrap(),
         );
-        self.token = Token::new(TokenKind::CloseDelim(frame.delim), frame.span.close);
-        self.unnormalized_token = None;
+        self.set_token(Token::new(TokenKind::CloseDelim(frame.delim), frame.span.close));
         self.bump();
         TokenTree::Delimited(frame.span, frame.delim, frame.tree_cursor.stream.into())
     }