
Commit 5a1e640

make Parser::*() must_use and fix panic in *_if_each
1 parent 59a2f69 commit 5a1e640

2 files changed (+49, -12 lines)

CHANGELOG.md

Lines changed: 7 additions & 6 deletions
@@ -6,14 +6,15 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
 
 ## [Unreleased]
 ### Added
-- `quote::ToTokens` implementation for `TokenParser` (`quote` is a new default feature)
-- `peek_{token}` and `peek_n_{token}` to `TokenParser`
+- `quote::ToTokens` implementation for `TokenParser` (`quote` is a new default feature).
+- `peek_{token}` and `peek_n_{token}` to `TokenParser`.
 
 ### Changed
-- **Breaking Change** added const generic buffer size to `TokenParser`
-- **Breaking Change** `Peeker::peek` takes `&[TokenTree]` instead of `TokenParser`
-- `TokenParser` peeking supports `n` greater than stack buffer, allowing spilling to heap
-- increased default `TokenParser` peek buffer to `6`
+- **Breaking Change** Added const generic buffer size to `TokenParser`.
+- **Breaking Change** `Peeker::peek` takes `&[TokenTree]` instead of `TokenParser`.
+- `TokenParser` peeking supports `n` greater than stack buffer, allowing spilling to heap.
+- Increased default `TokenParser` peek buffer to `6`.
+- Marked parser functions as must_use.
 
 ## [0.6.0] - 2023-04-29
 - `TokenParser::next_keyword(v)`
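
The new changelog entry "Marked parser functions as must_use" is the user-facing half of this commit. A minimal sketch of what that changes in practice (the crate path `proc_macro_utils` and the way the parser is obtained are assumptions; `is_empty` is one of the methods annotated in the diff below):

    use proc_macro_utils::TokenParser;

    fn demo(mut parser: TokenParser) {
        // `is_empty` only peeks, so discarding its result is almost certainly a
        // mistake; with the new `#[must_use]` this line now produces an
        // `unused_must_use` warning.
        parser.is_empty();

        // Intended usage: branch on the result.
        if !parser.is_empty() {
            // ... consume tokens ...
        }
    }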

src/parser.rs

Lines changed: 42 additions & 6 deletions
@@ -17,12 +17,14 @@ pub trait Peeker {
     /// # Panics
     ///
     /// Implementations can panic if `tokens.len() < Self::LENGTH`.
+    #[must_use]
     fn peek(self, tokens: &[TokenTree]) -> bool;
 }
 
 impl<T: FnOnce(&TokenTree) -> bool> Peeker for T {
     const LENGTH: usize = 1;
 
+    #[must_use]
     fn peek(self, parser: &[TokenTree]) -> bool {
         self(&parser[0])
     }
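
Closures are the single-token `Peeker`s (`LENGTH == 1`), and the `impl_peeker!` invocation below extends the trait to tuples of closures. A rough sketch of how such peekers are consumed, assuming `proc_macro2` token types and the `proc_macro_utils` crate path (both assumptions; `next_if_each` comes from the hunks further down):

    use proc_macro2::{TokenStream, TokenTree};
    use proc_macro_utils::TokenParser;

    // A tuple of two closures acts as a two-token peeker: the pattern only
    // matches when both closures accept their respective token.
    fn take_ident_then_punct(parser: &mut TokenParser) -> Option<TokenStream> {
        parser.next_if_each((
            |t: &TokenTree| matches!(t, TokenTree::Ident(_)),
            |t: &TokenTree| matches!(t, TokenTree::Punct(_)),
        ))
    }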
@@ -84,6 +86,7 @@ impl_peeker![
 /// additionally.
 #[allow(clippy::module_name_repetitions)]
 #[derive(Clone)]
+#[must_use]
 pub struct TokenParser<
     I: Iterator<Item = TokenTree> = token_stream::IntoIter,
     const PEEKER_LEN: usize = 6,
@@ -135,6 +138,7 @@ impl<I, const PEEKER_LEN: usize> From<TokenParser<I, PEEKER_LEN>> for TokenStream
 where
     I: Iterator<Item = TokenTree>,
 {
+    #[must_use]
     fn from(value: TokenParser<I, PEEKER_LEN>) -> Self {
         value.iter.collect()
     }
@@ -146,6 +150,7 @@ where
 {
     type Item = TokenTree;
 
+    #[must_use]
     fn next(&mut self) -> Option<Self::Item> {
         if self.peek.is_empty() {
             self.iter.next()
@@ -164,10 +169,12 @@ where
         tokens.extend(self.clone());
     }
 
+    #[must_use]
     fn to_token_stream(&self) -> TokenStream {
         self.clone().collect()
     }
 
+    #[must_use]
     fn into_token_stream(self) -> TokenStream
     where
         Self: Sized,
@@ -179,14 +186,17 @@ where
 macro_rules! punct {
     ($($punct:literal, [$($tests:ident),*; $last:ident], $peek:ident, $peek_n:ident, $name:ident);*$(;)?) => {
         $(#[doc = concat!("Returns the next token if it is a `", $punct ,"`")]
+        #[must_use]
         pub fn $name(&mut self) -> Option<TokenStream> {
             self.next_if_each(($(|t:&TokenTree|t.is_joint() && t.$tests(),)* |t:&TokenTree| t.is_alone() && t.$last()))
         })*
         $(#[doc = concat!("Returns the next token if it is a `", $punct ,"` without advancing the parser")]
+        #[must_use]
         pub fn $peek(&mut self) -> Option<TokenStream> {
             self.$peek_n(0)
         })*
         $(#[doc = concat!("Returns the `n`th token if it is a `", $punct ,"` without advancing the parser")]
+        #[must_use]
         pub fn $peek_n(&mut self, n:usize) -> Option<TokenStream> {
             self.peek_n_if_each(n, ($(|t:&TokenTree|t.is_joint() && t.$tests(),)* |t:&TokenTree| t.is_alone() && t.$last()))
         })*
@@ -199,16 +209,19 @@ macro_rules! punct {
 macro_rules! token_tree {
     ($($a:literal, $test:ident, $peek_as:ident, $as:ident, $peek:ident, $peek_n:ident, $name:ident, $token:ident);*$(;)?) => {
         $(#[doc = concat!("Returns the next token if it is ", $a, " [`", stringify!($token) ,"`].")]
+        #[must_use]
         pub fn $name(&mut self) -> Option<$token> {
             self.$peek().is_some().then(|| self.next().expect("token should be present").$as().expect(concat!("should be ", stringify!($token))))
         })*
 
         $(#[doc = concat!("Returns the next token if it is ", $a, " [`", stringify!($token) ,"`] without advancing the parser.")]
+        #[must_use]
         pub fn $peek(&mut self) -> Option<&$token> {
             self.$peek_n(0)
         })*
 
         $(#[doc = concat!("Returns the `n`th token if it is ", $a, " [`", stringify!($token) ,"`] without advancing the parser.")]
+        #[must_use]
         pub fn $peek_n(&mut self, n: usize) -> Option<&$token> {
             self.peek_n(n).and_then(TokenTree::$peek_as)
         })*
@@ -218,17 +231,20 @@ macro_rules! token_tree {
 macro_rules! delimited {
     ($($test:ident, $peek:ident, $peek_n:ident, $name:ident, $doc:literal;)*) => {
         $(#[doc = concat!("Returns the next token if it is a ", $doc ," group.")]
+        #[must_use]
         pub fn $name(&mut self) -> Option<TokenStream> {
             self.$peek().map(|stream| {
                 self.next().unwrap();
                 stream
             })
         })*
         $(#[doc = concat!("Returns the next token if it is a", $doc ," group, without advancing the parser.")]
+        #[must_use]
         pub fn $peek(&mut self) -> Option<TokenStream> {
             self.$peek_n(0)
         })*
         $(#[doc = concat!("Returns the `n`th token if it is a ", $doc ," group, without advancing the parser.")]
+        #[must_use]
         pub fn $peek_n(&mut self, n: usize) -> Option<TokenStream> {
             self.peek_n(n).and_then(|token|
                 token.$test().then(|| token.group().unwrap().stream()))
@@ -242,11 +258,13 @@ where
     I: Iterator<Item = TokenTree>,
 {
     /// Checks if there are remaining tokens
+    #[must_use]
     pub fn is_empty(&mut self) -> bool {
         self.peek().is_none()
     }
 
     /// Peeks the next token without advancing the parser
+    #[must_use]
     pub fn peek(&mut self) -> Option<&TokenTree> {
         if self.peek.is_empty() {
             self.peek.push(self.iter.next()?);
@@ -255,6 +273,7 @@ where
     }
 
     /// Peeks the `n`th token without advancing the parser
+    #[must_use]
     pub fn peek_n(&mut self, n: usize) -> Option<&TokenTree> {
         for _ in self.peek.len()..=n {
             self.peek.push(self.iter.next()?);
@@ -264,39 +283,48 @@ where
 
     /// Returns the next token if it fulfills the condition otherwise returns
     /// None and doesn't advance the parser
+    #[must_use]
     pub fn next_if(&mut self, test: impl FnOnce(&TokenTree) -> bool) -> Option<TokenTree> {
         test(self.peek()?).then(|| self.next().expect("was peeked"))
     }
 
     /// Returns the next tokens if they fulfill the conditions
     /// otherwise returns None and doesn't advance the parser
+    #[must_use]
     pub fn next_if_each<P: Peeker>(&mut self, tests: P) -> Option<TokenStream> {
         // Ensure peek is filled;
-        self.peek_n(P::LENGTH);
+        if PEEKER_LEN > 0 {
+            self.peek_n(P::LENGTH - 1)?;
+        }
         tests
             .peek(&self.peek[..P::LENGTH])
             .then(|| self.peek.drain(0..P::LENGTH).collect())
     }
 
     /// Returns the next tokens if they fulfill the conditions
     /// otherwise returns None, without advancing the parser
+    #[must_use]
     pub fn peek_if_each<P: Peeker>(&mut self, tests: P) -> Option<TokenStream> {
         // Ensure peek is filled;
         self.peek_n_if_each(0, tests)
     }
 
-    /// Returns the next tokens from `n` (up to 3) if they fulfill the
+    /// Returns the next tokens from `n` if they fulfill the
     /// conditions otherwise returns None, without advancing the parser
+    #[must_use]
     pub fn peek_n_if_each<P: Peeker>(&mut self, n: usize, tests: P) -> Option<TokenStream> {
         // Ensure peek is filled;
-        self.peek_n(P::LENGTH + n);
+        if PEEKER_LEN > 0 {
+            self.peek_n(P::LENGTH + n - 1)?;
+        }
         let peeked = &self.peek[n..P::LENGTH + n];
         tests.peek(peeked).then(|| peeked.iter().cloned().collect())
     }
 
     /// Returns all tokens while `test` evaluates to true.
     ///
     /// Returns `None` if empty or `test(first_token) == false`
+    #[must_use]
     pub fn next_while(&mut self, mut test: impl FnMut(&TokenTree) -> bool) -> Option<TokenStream> {
         if self.peek().is_none() || !test(self.peek().expect("was peeked")) {
             None
@@ -313,6 +341,7 @@ where
     /// Returns all tokens while `test` evaluates to false.
     ///
     /// Returns `None` if empty or `test(first_token) == true`.
+    #[must_use]
     pub fn next_until(&mut self, mut test: impl FnMut(&TokenTree) -> bool) -> Option<TokenStream> {
         self.next_while(|token| !test(token))
     }
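
The `next_if_each`/`peek_n_if_each` changes above are the panic fix named in the commit message: previously the result of `peek_n` was discarded and `&self.peek[..P::LENGTH]` was sliced unconditionally, which panicked once the underlying iterator ran out of tokens; the new `self.peek_n(P::LENGTH - 1)?` bails out with `None` first. A hedged sketch of the post-fix behavior (crate path and parser construction are assumptions):

    use proc_macro2::TokenTree;
    use proc_macro_utils::TokenParser;

    fn demo(mut parser: TokenParser) {
        // Exhaust the parser (it implements `Iterator`).
        while parser.next().is_some() {}

        // Asking for more tokens than remain used to slice past the end of the
        // peek buffer and panic; it now simply returns `None`.
        let matched = parser.next_if_each(|t: &TokenTree| matches!(t, TokenTree::Punct(_)));
        assert!(matched.is_none());
    }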
@@ -323,19 +352,21 @@ where
     I: Iterator<Item = TokenTree>,
 {
     /// Collects remaining tokens back into a [`TokenStream`]
+    #[must_use]
     pub fn into_token_stream(self) -> TokenStream {
         self.into()
     }
 
     /// Returns the next group of punctuation with [`Punct::spacing`]
     /// [`Spacing::Joint`]
+    #[must_use]
     pub fn next_punctuation_group(&mut self) -> Option<TokenStream> {
         let mut joined = true;
-        dbg!(self.next_while(move |token| {
-            let ret = joined && dbg!(token.is_punct());
+        self.next_while(move |token| {
+            let ret = joined && token.is_punct();
             joined = token.is_joint();
             ret
-        }))
+        })
     }
 
     /// Returns the next ident if it matches the specified keyword.
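
The hunk above also drops a leftover `dbg!` from `next_punctuation_group`, which otherwise printed to stderr on every call. For reference, a small sketch of what the method yields (crate path and construction are assumptions):

    use proc_macro2::TokenStream;
    use proc_macro_utils::TokenParser;

    // Returns the leading run of `Spacing::Joint` punctuation plus the first
    // `Spacing::Alone` punct, e.g. both characters of `->` as one stream.
    fn leading_operator(parser: &mut TokenParser) -> Option<TokenStream> {
        parser.next_punctuation_group()
    }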
@@ -351,6 +382,7 @@ where
     /// assert_eq!(parser.next_keyword("out").unwrap().to_string(), "out");
     /// assert!(parser.next_keyword("anything").is_none());
     /// ```
+    #[must_use]
     pub fn next_keyword<K: ?Sized>(&mut self, keyword: &K) -> Option<Ident>
     where
         Ident: PartialEq<K>,
@@ -381,6 +413,7 @@ where
     /// assert!(tokens.next_type().is_none());
     /// assert_tokens!(tokens, { , remainder });
     /// ```
+    #[must_use]
     pub fn next_type(&mut self) -> Option<TokenStream> {
         let Some(first) = self.peek() else { return None; };
         if first.is_comma() || first.is_semi() {
@@ -424,6 +457,7 @@ where
     /// assert!(tokens.next_expression().is_none());
     /// assert_tokens!(tokens, { , next_token });
     /// ```
+    #[must_use]
     pub fn next_expression(&mut self) -> Option<TokenStream> {
         if self.peek().is_none()
             || matches!(self.peek(), Some(token) if token.is_comma() || token.is_semi())
@@ -472,6 +506,7 @@ where
     }
 
     /// Returns the next string literal
+    #[must_use]
     pub fn next_string(&mut self) -> Option<String> {
         if !self.peek()?.is_literal() {
             return None;
@@ -490,6 +525,7 @@ where
     }
 
     /// Returns the next boolean literal
+    #[must_use]
     pub fn next_bool(&mut self) -> Option<bool> {
         self.next_if(|t| {
             t.ident()
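
The remaining hunks only add `#[must_use]` to the literal helpers. A small usage sketch for completeness (crate path and construction are assumptions; `next_string` and `next_bool` are the methods annotated above):

    use proc_macro_utils::TokenParser;

    // Reads e.g. `"verbose" true` from the front of the parser.
    fn read_flag(parser: &mut TokenParser) -> Option<(String, bool)> {
        let name = parser.next_string()?;  // contents of the next string literal
        let value = parser.next_bool()?;   // next `true`/`false`
        Some((name, value))
    }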
