@@ -288,20 +288,20 @@ assert_tokens!(parser.", stringify!($peek_n), "(1).unwrap(), { ", $punct, " });
288288 pub fn $peek_n( & mut self , n: usize ) -> Option <TokenStream > {
289289 peek_punct!( n, self , $( $cond) * )
290290 } ) *
291- /// Returns the next token tree as intepreted by the `tt` type in `macro_rules`, i.e., any literal, group,
292- /// or [composed punctionation ](https://doc.rust-lang.org/reference/tokens.html#punctuation).
291+ /// Returns the next token tree as interpreted by the `tt` type in `macro_rules`, i.e., any literal, group,
292+ /// or [composed punctuation ](https://doc.rust-lang.org/reference/tokens.html#punctuation).
293293 pub fn next_tt( & mut self ) -> Option <TokenStream > {
294294 self . next_if_each( TokenTree :: is_group)
295295 . or_else( || self . next_if_each( TokenTree :: is_literal) )
296296 $( . or_else( || self . $name( ) ) ) *
297297 }
298- /// Peeks the next token tree as intepreted by the `tt` type in `macro_rules`, i.e., any literal, group,
299- /// or [composed punctionation ](https://doc.rust-lang.org/reference/tokens.html#punctuation).
298+ /// Peeks the next token tree as interpreted by the `tt` type in `macro_rules`, i.e., any literal, group,
299+ /// or [composed punctuation ](https://doc.rust-lang.org/reference/tokens.html#punctuation).
300300 pub fn peek_tt( & mut self ) -> Option <TokenStream > {
301301 self . peek_n_tt( 0 )
302302 }
303- /// Peeks the next token tree from the `n`th token as intepreted by the `tt` type in `macro_rules`, i.e., any literal, group,
304- /// or [composed punctionation ](https://doc.rust-lang.org/reference/tokens.html#punctuation).
303+ /// Peeks the next token tree from the `n`th token as interpreted by the `tt` type in `macro_rules`, i.e., any literal, group,
304+ /// or [composed punctuation ](https://doc.rust-lang.org/reference/tokens.html#punctuation).
305305 pub fn peek_n_tt( & mut self , n: usize ) -> Option <TokenStream > {
306306 self . peek_if_each( TokenTree :: is_group)
307307 . or_else( || self . peek_if_each( TokenTree :: is_literal) )
@@ -425,6 +425,7 @@ where
425425 /// assert_tokens!(parser.next_if(TokenTreePunct::is_alone), { : });
426426 /// ```
427427 #[ must_use]
428+ #[ allow( clippy:: missing_panics_doc) ]
428429 pub fn next_if ( & mut self , test : impl FnOnce ( & TokenTree ) -> bool ) -> Option < TokenTree > {
429430 test ( self . peek ( ) ?) . then ( || self . next ( ) . expect ( "was peeked" ) )
430431 }
@@ -512,6 +513,7 @@ where
512513 ///
513514 /// Returns `None` if empty or `test(first_token) == false`
514515 #[ must_use]
516+ #[ allow( clippy:: missing_panics_doc) ]
515517 pub fn next_while ( & mut self , mut test : impl FnMut ( & TokenTree ) -> bool ) -> Option < TokenStream > {
516518 if self . peek ( ) . is_none ( ) || !test ( self . peek ( ) . expect ( "was peeked" ) ) {
517519 None
@@ -531,6 +533,7 @@ where
531533 ///
532534 /// Returns `None` if empty or `test(first_token) == false`
533535 #[ must_use]
536+ #[ allow( clippy:: missing_panics_doc) ]
534537 pub fn next_while_alone (
535538 & mut self ,
536539 mut test : impl FnMut ( & TokenTree ) -> bool ,
@@ -571,10 +574,10 @@ where
571574
572575 /// Returns the next `n` tokens.
573576 ///
574- /// Returns `None` if the parser contains less then `n` tokens.
577+ /// Returns `None` if the parser contains less than `n` tokens.
575578 ///
576579 /// **Note:** This should only be used for small `n` ideally less than
577- /// `PEEKER_LEN`. Otherwise something like this would be more performant:
580+ /// `PEEKER_LEN`. Otherwise, something like this would be more performant:
578581 /// ```
579582 /// use proc_macro2::TokenStream;
580583 /// use proc_macro_utils::{TokenParser, assert_tokens};
@@ -596,10 +599,10 @@ where
596599 /// Returns the next `n` tokens. If the last token is a punct it's
597600 /// [`spacing`](Punct::spacing()) is set to [`Alone`](Spacing::Alone).
598601 ///
599- /// Returns `None` if the parser contains less then `n` tokens.
602+ /// Returns `None` if the parser contains less than `n` tokens.
600603 ///
601604 /// **Note:** This should only be used for small `n` ideally less than
602- /// `PEEKER_LEN`. Otherwise something like this would be more performant:
605+ /// `PEEKER_LEN`. Otherwise, something like this would be more performant:
603606 /// ```
604607 /// use proc_macro2::TokenStream;
605608 /// use proc_macro_utils::{TokenParser, assert_tokens, TokenTreePunct};
@@ -621,10 +624,10 @@ where
621624
622625 /// Returns the specified `range` of tokens.
623626 ///
624- /// Returns `None` if the parser does not contain this `range` tokens.
627+ /// Returns `None` if the parser does not contain this `range` of tokens.
625628 ///
626629 /// **Note:** This should only be used for small and close to start `range`s
627- /// ideally less than `PEEKER_LEN`. Otherwise something like this could be
630+ /// ideally less than `PEEKER_LEN`. Otherwise, something like this could be
628631 /// more performant:
629632 /// ```
630633 /// use proc_macro2::TokenStream;
@@ -670,10 +673,10 @@ where
670673 /// it's [`spacing`](Punct::spacing()) is set to
671674 /// [`Alone`](Spacing::Alone).
672675 ///
673- /// Returns `None` if the parser does not contain this `range` tokens.
676+ /// Returns `None` if the parser does not contain this `range` of tokens.
674677 ///
675678 /// **Note:** This should only be used for small and close to start `range`s
676- /// ideally less than `PEEKER_LEN`. Otherwise something like this could be
679+ /// ideally less than `PEEKER_LEN`. Otherwise, something like this could be
677680 /// more performant:
678681 ///
679682 /// ```
@@ -799,6 +802,7 @@ where
799802 /// assert!(parser.next_keyword("anything").is_none());
800803 /// ```
801804 #[ must_use]
805+ #[ allow( clippy:: missing_panics_doc) ]
802806 pub fn next_keyword < K : ?Sized > ( & mut self , keyword : & K ) -> Option < Ident >
803807 where
804808 Ident : PartialEq < K > ,
@@ -876,6 +880,7 @@ where
876880 /// assert_tokens!(tokens, { , next_token });
877881 /// ```
878882 #[ must_use]
883+ #[ allow( clippy:: missing_panics_doc) ]
879884 pub fn next_expression ( & mut self ) -> Option < TokenStream > {
880885 if self . peek ( ) . is_none ( )
881886 || matches ! ( self . peek( ) , Some ( token) if token. is_comma( ) || token. is_semi( ) )
@@ -1114,6 +1119,56 @@ where
11141119 "?" , [ is_question] , peek_tt_question, peek_n_tt_question, next_tt_question;
11151120 "~" , [ is_tilde] , peek_tt_tilde, peek_n_tt_tilde, next_tt_tilde;
11161121 ) ;
1122+
1123+ /// Returns the next token if it is a [punctuation token tree](https://doc.rust-lang.org/reference/tokens.html#punctuation) following the same rules as [macro_rule's `tt`](https://doc.rust-lang.org/reference/macros-by-example.html#metavariables).
1124+ ///
1125+ /// ```
1126+ /// use proc_macro_utils::{assert_tokens, TokenParser};
1127+ /// use quote::quote;
1128+ /// let mut parser = TokenParser::new(quote!(.. =. 1 b));
1129+ /// assert_tokens!(parser.next_macro_rules_tt().unwrap(), { .. });
1130+ /// assert_tokens!(parser.next_macro_rules_tt().unwrap(), { = });
1131+ /// assert_tokens!(parser, { . 1 b });
1132+ /// ```
1133+ #[ must_use]
1134+ #[ allow( clippy:: missing_panics_doc) ]
1135+ pub fn next_macro_rules_tt ( & mut self ) -> Option < TokenStream > {
1136+ // ensure that the next 3 tokens are peeked if possible
1137+ _ = self . peek_n ( 2 ) ;
1138+ let first = self . peek . first ( ) . and_then ( TokenTree :: punct) ?;
1139+ let second = first
1140+ . is_joint ( )
1141+ . then ( || self . peek . get ( 1 ) . and_then ( TokenTree :: punct) )
1142+ . flatten ( ) ;
1143+ let third = second
1144+ . is_some_and ( TokenTreePunct :: is_joint)
1145+ . then ( || self . peek . get ( 2 ) . and_then ( TokenTree :: punct) )
1146+ . flatten ( ) ;
1147+ let chars = [
1148+ first. as_char ( ) ,
1149+ second. map_or ( '_' , Punct :: as_char) ,
1150+ third. map_or ( '_' , Punct :: as_char) ,
1151+ ] ;
1152+ if matches ! (
1153+ chars,
1154+ [ '.' , '.' , '.' | '=' ] | [ '<' , '<' , '=' ] | [ '>' , '>' , '=' ]
1155+ ) {
1156+ self . next_n_alone ( 3 )
1157+ } else if matches ! (
1158+ & chars[ 0 ..2 ] ,
1159+ [ '&' , '&' | '=' ]
1160+ | [ '|' , '|' | '=' ]
1161+ | [ '<' , '<' | '=' ]
1162+ | [ '>' | '-' | '=' , '>' ]
1163+ | [ '+' | '-' | '*' | '/' | '%' | '^' | '=' | '!' | '>' , '=' ]
1164+ | [ '.' , '.' ]
1165+ | [ ':' , ':' ]
1166+ ) {
1167+ self . next_n_alone ( 2 )
1168+ } else {
1169+ self . next_n_alone ( 1 )
1170+ }
1171+ }
11171172}
11181173
11191174#[ cfg( test) ]
0 commit comments