Skip to content

Commit bddc0ed

Browse files
committed
add next_macro_rules_tt to TokenParser
1 parent 8f2ef81 commit bddc0ed

File tree

4 files changed

+80
-19
lines changed

4 files changed

+80
-19
lines changed

CHANGELOG.md

Lines changed: 4 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -7,6 +7,10 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
77
## [Unreleased]
88
### Added
99
- `*_tt` functions to `TokenParser`
10+
- `next_macro_rules_tt()` to `TokenParser`
11+
12+
### Fixed
13+
- `assert_expansion!` was failing on nightly
1014

1115
## [0.8.0] - 2023-05-14
1216
### Fixed

src/assert.rs

Lines changed: 5 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -1,8 +1,8 @@
11
/// Allows simple unit testing of proc macro implementations.
22
///
33
/// This macro only works with functions taking [`proc_macro2::TokenStream`] due
4-
/// to the [`proc_macro`] api not being available in unit tests. This can be
5-
/// achieved either by manually creating a seperate function:
4+
/// to the [`proc_macro`] API not being available in unit tests. This can be
5+
/// achieved either by manually creating a separate function:
66
/// ```ignore
77
/// use proc_macro::TokenStream;
88
/// use proc_macro2::TokenStream as TokenStream2;
@@ -253,7 +253,7 @@ macro_rules! assert_tokens {
253253
$crate::assert_tokens!(@G $lhs, next_bracketed, $aggr, '[', [ $($inner)* ], { $($inner)* }, $($rhs)*);
254254
};
255255
(@O $lhs:ident, $aggr:expr, $token:tt $($rhs:tt)*) => {
256-
if let Some(lhs) = $lhs.next_punctuation_group().map(|t|t.to_string()).or_else(|| $lhs.next().map(|t|t.to_string())) {
256+
if let Some(lhs) = $lhs.next_macro_rules_tt().map(|t|t.to_string()).or_else(|| $lhs.next().map(|t|t.to_string())) {
257257
if(lhs != stringify!($token)) {
258258
$crate::assert_tokens!(@E $aggr, ($token), lhs);
259259
}
@@ -278,4 +278,6 @@ fn test() {
278278
assert_tokens!(quote!(more:::test::test:: hello :-D $$$ It should just work), {
279279
more ::: test ::test:: hello :-D $$$ It should just work
280280
});
281+
282+
assert_tokens!(quote!(:$), {: $});
281283
}

src/lib.rs

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,10 +1,10 @@
11
//! Some useful functions on [`proc_macro`] and [`proc_macro2`] types
22
//!
33
//! E.g. [pushing tokens onto `TokenStream`](TokenStreamExt::push) and [testing
4-
//! for specific punctuation on `TokenTree` and Punct](TokenTreePunct)
4+
//! for specific punctuation on `TokenTree` and `Punct`](TokenTreePunct)
55
//!
66
//! It also adds the [`assert_tokens!`] and [`assert_expansion!`] macros to
7-
//! improve unit testability for proc-macros.
7+
//! improve unit testability for `proc-macros`.
88
#![warn(clippy::pedantic, missing_docs)]
99
#![cfg_attr(docsrs, feature(doc_auto_cfg))]
1010
#![deny(rustdoc::all)]

src/parser.rs

Lines changed: 69 additions & 14 deletions
Original file line numberDiff line numberDiff line change
@@ -288,20 +288,20 @@ assert_tokens!(parser.", stringify!($peek_n), "(1).unwrap(), { ", $punct, " });
288288
pub fn $peek_n(&mut self, n: usize) -> Option<TokenStream> {
289289
peek_punct!(n, self, $($cond)*)
290290
})*
291-
/// Returns the next token tree as intepreted by the `tt` type in `macro_rules`, i.e., any literal, group,
292-
/// or [composed punctionation](https://doc.rust-lang.org/reference/tokens.html#punctuation).
291+
/// Returns the next token tree as interpreted by the `tt` type in `macro_rules`, i.e., any literal, group,
292+
/// or [composed punctuation](https://doc.rust-lang.org/reference/tokens.html#punctuation).
293293
pub fn next_tt(&mut self) -> Option<TokenStream> {
294294
self.next_if_each(TokenTree::is_group)
295295
.or_else(|| self.next_if_each(TokenTree::is_literal))
296296
$(.or_else(|| self.$name()))*
297297
}
298-
/// Peeks the next token tree as intepreted by the `tt` type in `macro_rules`, i.e., any literal, group,
299-
/// or [composed punctionation](https://doc.rust-lang.org/reference/tokens.html#punctuation).
298+
/// Peeks the next token tree as interpreted by the `tt` type in `macro_rules`, i.e., any literal, group,
299+
/// or [composed punctuation](https://doc.rust-lang.org/reference/tokens.html#punctuation).
300300
pub fn peek_tt(&mut self) -> Option<TokenStream> {
301301
self.peek_n_tt(0)
302302
}
303-
/// Peeks the next token tree from the `n`th token as intepreted by the `tt` type in `macro_rules`, i.e., any literal, group,
304-
/// or [composed punctionation](https://doc.rust-lang.org/reference/tokens.html#punctuation).
303+
/// Peeks the next token tree from the `n`th token as interpreted by the `tt` type in `macro_rules`, i.e., any literal, group,
304+
/// or [composed punctuation](https://doc.rust-lang.org/reference/tokens.html#punctuation).
305305
pub fn peek_n_tt(&mut self, n: usize) -> Option<TokenStream> {
306306
self.peek_if_each(TokenTree::is_group)
307307
.or_else(|| self.peek_if_each(TokenTree::is_literal))
@@ -425,6 +425,7 @@ where
425425
/// assert_tokens!(parser.next_if(TokenTreePunct::is_alone), { : });
426426
/// ```
427427
#[must_use]
428+
#[allow(clippy::missing_panics_doc)]
428429
pub fn next_if(&mut self, test: impl FnOnce(&TokenTree) -> bool) -> Option<TokenTree> {
429430
test(self.peek()?).then(|| self.next().expect("was peeked"))
430431
}
@@ -512,6 +513,7 @@ where
512513
///
513514
/// Returns `None` if empty or `test(first_token) == false`
514515
#[must_use]
516+
#[allow(clippy::missing_panics_doc)]
515517
pub fn next_while(&mut self, mut test: impl FnMut(&TokenTree) -> bool) -> Option<TokenStream> {
516518
if self.peek().is_none() || !test(self.peek().expect("was peeked")) {
517519
None
@@ -531,6 +533,7 @@ where
531533
///
532534
/// Returns `None` if empty or `test(first_token) == false`
533535
#[must_use]
536+
#[allow(clippy::missing_panics_doc)]
534537
pub fn next_while_alone(
535538
&mut self,
536539
mut test: impl FnMut(&TokenTree) -> bool,
@@ -571,10 +574,10 @@ where
571574

572575
/// Returns the next `n` tokens.
573576
///
574-
/// Returns `None` if the parser contains less then `n` tokens.
577+
/// Returns `None` if the parser contains less than `n` tokens.
575578
///
576579
/// **Note:** This should only be used for small `n` ideally less than
577-
/// `PEEKER_LEN`. Otherwise something like this would be more performant:
580+
/// `PEEKER_LEN`. Otherwise, something like this would be more performant:
578581
/// ```
579582
/// use proc_macro2::TokenStream;
580583
/// use proc_macro_utils::{TokenParser, assert_tokens};
@@ -596,10 +599,10 @@ where
596599
/// Returns the next `n` tokens. If the last token is a punct it's
597600
/// [`spacing`](Punct::spacing()) is set to [`Alone`](Spacing::Alone).
598601
///
599-
/// Returns `None` if the parser contains less then `n` tokens.
602+
/// Returns `None` if the parser contains less than `n` tokens.
600603
///
601604
/// **Note:** This should only be used for small `n` ideally less than
602-
/// `PEEKER_LEN`. Otherwise something like this would be more performant:
605+
/// `PEEKER_LEN`. Otherwise, something like this would be more performant:
603606
/// ```
604607
/// use proc_macro2::TokenStream;
605608
/// use proc_macro_utils::{TokenParser, assert_tokens, TokenTreePunct};
@@ -621,10 +624,10 @@ where
621624

622625
/// Returns the specified `range` of tokens.
623626
///
624-
/// Returns `None` if the parser does not contain this `range` tokens.
627+
/// Returns `None` if the parser does not contain these `range` tokens.
625628
///
626629
/// **Note:** This should only be used for small and close to start `range`s
627-
/// ideally less than `PEEKER_LEN`. Otherwise something like this could be
630+
/// ideally less than `PEEKER_LEN`. Otherwise, something like this could be
628631
/// more performant:
629632
/// ```
630633
/// use proc_macro2::TokenStream;
@@ -670,10 +673,10 @@ where
670673
/// it's [`spacing`](Punct::spacing()) is set to
671674
/// [`Alone`](Spacing::Alone).
672675
///
673-
/// Returns `None` if the parser does not contain this `range` tokens.
676+
/// Returns `None` if the parser does not contain these `range` tokens.
674677
///
675678
/// **Note:** This should only be used for small and close to start `range`s
676-
/// ideally less than `PEEKER_LEN`. Otherwise something like this could be
679+
/// ideally less than `PEEKER_LEN`. Otherwise, something like this could be
677680
/// more performant:
678681
///
679682
/// ```
@@ -799,6 +802,7 @@ where
799802
/// assert!(parser.next_keyword("anything").is_none());
800803
/// ```
801804
#[must_use]
805+
#[allow(clippy::missing_panics_doc)]
802806
pub fn next_keyword<K: ?Sized>(&mut self, keyword: &K) -> Option<Ident>
803807
where
804808
Ident: PartialEq<K>,
@@ -876,6 +880,7 @@ where
876880
/// assert_tokens!(tokens, { , next_token });
877881
/// ```
878882
#[must_use]
883+
#[allow(clippy::missing_panics_doc)]
879884
pub fn next_expression(&mut self) -> Option<TokenStream> {
880885
if self.peek().is_none()
881886
|| matches!(self.peek(), Some(token) if token.is_comma() || token.is_semi())
@@ -1114,6 +1119,56 @@ where
11141119
"?", [is_question], peek_tt_question, peek_n_tt_question, next_tt_question;
11151120
"~", [is_tilde], peek_tt_tilde, peek_n_tt_tilde, next_tt_tilde;
11161121
);
1122+
1123+
/// Returns the next token if it is a [punctuation token tree](https://doc.rust-lang.org/reference/tokens.html#punctuation) following the same rules as [`macro_rules`' `tt`](https://doc.rust-lang.org/reference/macros-by-example.html#metavariables).
1124+
///
1125+
/// ```
1126+
/// use proc_macro_utils::{assert_tokens, TokenParser};
1127+
/// use quote::quote;
1128+
/// let mut parser = TokenParser::new(quote!(.. =. 1 b));
1129+
/// assert_tokens!(parser.next_macro_rules_tt().unwrap(), { .. });
1130+
/// assert_tokens!(parser.next_macro_rules_tt().unwrap(), { = });
1131+
/// assert_tokens!(parser, { . 1 b });
1132+
/// ```
1133+
#[must_use]
1134+
#[allow(clippy::missing_panics_doc)]
1135+
pub fn next_macro_rules_tt(&mut self) -> Option<TokenStream> {
1136+
// ensure that the next 3 tokens are peeked if possible
1137+
_ = self.peek_n(2);
1138+
let first = self.peek.first().and_then(TokenTree::punct)?;
1139+
let second = first
1140+
.is_joint()
1141+
.then(|| self.peek.get(1).and_then(TokenTree::punct))
1142+
.flatten();
1143+
let third = second
1144+
.is_some_and(TokenTreePunct::is_joint)
1145+
.then(|| self.peek.get(2).and_then(TokenTree::punct))
1146+
.flatten();
1147+
let chars = [
1148+
first.as_char(),
1149+
second.map_or('_', Punct::as_char),
1150+
third.map_or('_', Punct::as_char),
1151+
];
1152+
if matches!(
1153+
chars,
1154+
['.', '.', '.' | '='] | ['<', '<', '='] | ['>', '>', '=']
1155+
) {
1156+
self.next_n_alone(3)
1157+
} else if matches!(
1158+
&chars[0..2],
1159+
['&', '&' | '=']
1160+
| ['|', '|' | '=']
1161+
| ['<', '<' | '=']
1162+
| ['>' | '-' | '=', '>']
1163+
| ['+' | '-' | '*' | '/' | '%' | '^' | '=' | '!' | '>', '=']
1164+
| ['.', '.']
1165+
| [':', ':']
1166+
) {
1167+
self.next_n_alone(2)
1168+
} else {
1169+
self.next_n_alone(1)
1170+
}
1171+
}
11171172
}
11181173

11191174
#[cfg(test)]

0 commit comments

Comments
 (0)