@@ -401,7 +401,7 @@ crate enum TokenType {
 impl TokenType {
     crate fn to_string(&self) -> String {
         match *self {
-            TokenType::Token(ref t) => format!("`{}`", pprust::token_to_string(t)),
+            TokenType::Token(ref t) => format!("`{}`", pprust::token_kind_to_string(t)),
             TokenType::Keyword(kw) => format!("`{}`", kw),
             TokenType::Operator => "an operator".to_string(),
             TokenType::Lifetime => "lifetime".to_string(),
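
An aside on the change above: `pprust::token_to_string` becomes `pprust::token_kind_to_string` because the value stored in `TokenType::Token` is a bare `TokenKind`, not a spanned `Token`. The following is only a minimal sketch of that naming split, using simplified stand-in types rather than the actual pprust API:

// Simplified stand-ins for illustration only; the real definitions live in
// libsyntax and carry many more variants and fields.
enum TokenKind {
    Lt,
    Comma,
}

struct Span(u32, u32);

struct Token {
    kind: TokenKind,
    span: Span,
}

// Printing a bare kind needs no span information...
fn token_kind_to_string(kind: &TokenKind) -> String {
    match kind {
        TokenKind::Lt => "<".to_string(),
        TokenKind::Comma => ",".to_string(),
    }
}

// ...and printing a full token can simply defer to its kind.
fn token_to_string(token: &Token) -> String {
    token_kind_to_string(&token.kind)
}

fn main() {
    let tok = Token { kind: TokenKind::Lt, span: Span(3, 4) };
    assert_eq!(token_kind_to_string(&TokenKind::Comma), ",");
    assert_eq!(token_to_string(&tok), "<");
    let _ = tok.span; // the span only matters for diagnostics, not printing
}
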
@@ -418,7 +418,7 @@ impl TokenType {
 ///
 /// Types can also be of the form `IDENT(u8, u8) -> u8`, however this assumes
 /// that `IDENT` is not the ident of a fn trait.
-fn can_continue_type_after_non_fn_ident(t: &TokenKind) -> bool {
+fn can_continue_type_after_non_fn_ident(t: &Token) -> bool {
     t == &token::ModSep || t == &token::Lt ||
     t == &token::BinOp(token::Shl)
 }
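
Worth noting: the body of `can_continue_type_after_non_fn_ident` stays untouched even though `t` is now a `&Token`, so the comparisons against `token::ModSep` and friends must be able to cross the `Token`/`TokenKind` boundary. A minimal sketch of one way that can work, with simplified stand-in types rather than the real libsyntax impls:

// Simplified stand-ins; assumes a cross-type PartialEq so a Token can be
// compared directly against a TokenKind, which is what lets the unchanged
// body above keep compiling.
#[derive(PartialEq)]
enum TokenKind {
    ModSep,
    Lt,
    Ident,
}

struct Token {
    kind: TokenKind,
}

impl PartialEq<TokenKind> for Token {
    fn eq(&self, other: &TokenKind) -> bool {
        self.kind == *other
    }
}

fn can_continue_type_after_non_fn_ident(t: &Token) -> bool {
    // Reads just like the diffed body: a `&Token` compared to `TokenKind`s.
    t == &TokenKind::ModSep || t == &TokenKind::Lt
}

fn main() {
    assert!(can_continue_type_after_non_fn_ident(&Token { kind: TokenKind::Lt }));
    assert!(!can_continue_type_after_non_fn_ident(&Token { kind: TokenKind::Ident }));
}
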
@@ -586,10 +586,10 @@ impl<'a> Parser<'a> {
         edible: &[TokenKind],
         inedible: &[TokenKind],
     ) -> PResult<'a, bool /* recovered */> {
-        if edible.contains(&self.token) {
+        if edible.contains(&self.token.kind) {
             self.bump();
             Ok(false)
-        } else if inedible.contains(&self.token) {
+        } else if inedible.contains(&self.token.kind) {
             // leave it in the input
             Ok(false)
         } else if self.last_unexpected_token_span == Some(self.token.span) {
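
The pattern in this hunk (and in several below): the parser's current token is now a full `Token` carrying a span, so lookups against a `&[TokenKind]` have to compare `self.token.kind` rather than the token itself. A minimal, self-contained sketch of why, again with simplified stand-in types:

// Simplified stand-ins; not the real parser types.
#[derive(Clone, Copy, Debug, PartialEq)]
enum TokenKind {
    Comma,
    Semi,
}

#[derive(Clone, Copy, Debug)]
struct Span(u32, u32);

#[derive(Clone, Copy, Debug)]
struct Token {
    kind: TokenKind,
    span: Span,
}

fn main() {
    let current = Token { kind: TokenKind::Comma, span: Span(10, 11) };
    let edible: &[TokenKind] = &[TokenKind::Comma, TokenKind::Semi];

    // `edible.contains(&current)` would not type-check: the slice holds
    // `TokenKind`s, not `Token`s. Comparing only the kind mirrors the
    // `expect_one_of` change above, while the span stays available for
    // diagnostics such as `last_unexpected_token_span`.
    if edible.contains(&current.kind) {
        println!("edible token {:?} at {:?}", current.kind, current.span);
    }
}
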
@@ -951,7 +951,7 @@ impl<'a> Parser<'a> {
                 Err(mut e) => {
                     // Attempt to keep parsing if it was a similar separator
                     if let Some(ref tokens) = t.similar_tokens() {
-                        if tokens.contains(&self.token) {
+                        if tokens.contains(&self.token.kind) {
                             self.bump();
                         }
                     }
@@ -1756,7 +1756,7 @@ impl<'a> Parser<'a> {
     fn parse_path_segment(&mut self, style: PathStyle) -> PResult<'a, PathSegment> {
         let ident = self.parse_path_segment_ident()?;
 
-        let is_args_start = |token: &TokenKind| match *token {
+        let is_args_start = |token: &Token| match token.kind {
             token::Lt | token::BinOp(token::Shl) | token::OpenDelim(token::Paren)
             | token::LArrow => true,
             _ => false,
@@ -2822,7 +2822,7 @@ impl<'a> Parser<'a> {
                 LhsExpr::AttributesParsed(attrs) => Some(attrs),
                 _ => None,
             };
-            if [token::DotDot, token::DotDotDot, token::DotDotEq].contains(&self.token) {
+            if [token::DotDot, token::DotDotDot, token::DotDotEq].contains(&self.token.kind) {
                 return self.parse_prefix_range_expr(attrs);
             } else {
                 self.parse_prefix_expr(attrs)?
@@ -3099,7 +3099,7 @@ impl<'a> Parser<'a> {
             self.err_dotdotdot_syntax(self.token.span);
         }
 
-        debug_assert!([token::DotDot, token::DotDotDot, token::DotDotEq].contains(&self.token),
+        debug_assert!([token::DotDot, token::DotDotDot, token::DotDotEq].contains(&self.token.kind),
                       "parse_prefix_range_expr: token {:?} is not DotDot/DotDotEq",
                       self.token);
         let tok = self.token.clone();
@@ -7867,7 +7867,7 @@ pub fn emit_unclosed_delims(unclosed_delims: &mut Vec<UnmatchedBrace>, handler:
     for unmatched in unclosed_delims.iter() {
         let mut err = handler.struct_span_err(unmatched.found_span, &format!(
             "incorrect close delimiter: `{}`",
-            pprust::token_to_string(&token::CloseDelim(unmatched.found_delim)),
+            pprust::token_kind_to_string(&token::CloseDelim(unmatched.found_delim)),
         ));
         err.span_label(unmatched.found_span, "incorrect close delimiter");
         if let Some(sp) = unmatched.candidate_span {