6 changes: 6 additions & 0 deletions compiler/rustc_ast/src/tokenstream.rs
@@ -907,6 +907,12 @@ impl TokenTreeCursor {
     pub fn bump(&mut self) {
         self.index += 1;
     }
+
+    // For skipping ahead in rare circumstances.
+    #[inline]
+    pub fn bump_to_end(&mut self) {
+        self.index = self.stream.len();
+    }
 }
 
 /// A `TokenStream` cursor that produces `Token`s. It's a bit odd that
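The new `bump_to_end` is an O(1) jump of the cursor's index to the end of its underlying stream, in contrast to `bump`, which advances past one token at a time. A minimal sketch of that shape, using hypothetical stand-in types (`Token`, `Cursor`) rather than rustc's `TokenTree`/`TokenTreeCursor`:

```rust
// Hypothetical stand-ins for rustc's token types; a sketch of the cursor
// shape, not the actual implementation.
#[derive(Debug, Clone)]
struct Token(u32);

struct Cursor {
    stream: Vec<Token>,
    index: usize,
}

impl Cursor {
    // Yield the current token (if any) and advance past it.
    fn next(&mut self) -> Option<Token> {
        let tok = self.stream.get(self.index).cloned();
        if tok.is_some() {
            self.index += 1;
        }
        tok
    }

    // Advance past a single token, like `TokenTreeCursor::bump`.
    fn bump(&mut self) {
        self.index += 1;
    }

    // Jump past every remaining token in one step, like the new `bump_to_end`.
    fn bump_to_end(&mut self) {
        self.index = self.stream.len();
    }
}

fn main() {
    let mut cursor = Cursor { stream: (0u32..1_000).map(Token).collect(), index: 0 };
    cursor.bump();        // consumed one token
    cursor.bump_to_end(); // O(1): now positioned at the end of the stream
    assert!(cursor.next().is_none());
    println!("index = {}", cursor.index); // 1000
}
```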
23 changes: 17 additions & 6 deletions compiler/rustc_parse/src/parser/mod.rs
@@ -1389,15 +1389,26 @@ impl<'a> Parser<'a> {
         // matching `CloseDelim` we are *after* the delimited sequence,
         // i.e. at depth `d - 1`.
         let target_depth = self.token_cursor.stack.len() - 1;
-        loop {
-            // Advance one token at a time, so `TokenCursor::next()`
-            // can capture these tokens if necessary.
+
+        if let Capturing::No = self.capture_state.capturing {
+            // We are not capturing tokens, so skip to the end of the
+            // delimited sequence. This is a perf win when dealing with
+            // declarative macros that pass large `tt` fragments through
+            // multiple rules, as seen in the uom-0.37.0 crate.
+            self.token_cursor.curr.bump_to_end();
             self.bump();
-            if self.token_cursor.stack.len() == target_depth {
-                debug_assert!(self.token.kind.close_delim().is_some());
-                break;
+            debug_assert_eq!(self.token_cursor.stack.len(), target_depth);
+        } else {
+            loop {
+                // Advance one token at a time, so `TokenCursor::next()`
+                // can capture these tokens if necessary.
+                self.bump();
+                if self.token_cursor.stack.len() == target_depth {
+                    break;
+                }
             }
         }
+        debug_assert!(self.token.kind.close_delim().is_some());
 
         // Consume close delimiter
         self.bump();
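The comment in the diff explains the win: when token capture is off (`Capturing::No`), nothing needs to observe the tokens inside the delimiters, so the cursor can jump straight to the close delimiter instead of walking every token. A simplified, self-contained model of that split (hypothetical types, not rustc's parser):

```rust
// Simplified model of the change above: skip a delimited group either
// token-by-token (so a capture hook sees every token) or in one jump when
// nothing is capturing. Both paths end at the same position.
#[derive(Clone, Copy, PartialEq, Debug)]
enum Capturing {
    Yes,
    No,
}

struct GroupCursor {
    len: usize,   // number of tokens inside the delimiters
    index: usize, // current position within the group
}

impl GroupCursor {
    fn bump(&mut self, capture: &mut Vec<usize>, capturing: Capturing) {
        if capturing == Capturing::Yes {
            capture.push(self.index); // stand-in for token capture
        }
        self.index += 1;
    }

    fn bump_to_end(&mut self) {
        self.index = self.len;
    }

    /// Skip to the close delimiter of the group.
    fn skip_group(&mut self, capture: &mut Vec<usize>, capturing: Capturing) {
        if capturing == Capturing::No {
            // Fast path: no one observes the inner tokens, so jump.
            self.bump_to_end();
        } else {
            // Slow path: advance one token at a time so each one is captured.
            while self.index < self.len {
                self.bump(capture, capturing);
            }
        }
        debug_assert_eq!(self.index, self.len);
    }
}

fn main() {
    let mut captured = Vec::new();

    let mut fast = GroupCursor { len: 100_000, index: 0 };
    fast.skip_group(&mut captured, Capturing::No); // O(1), captures nothing

    let mut slow = GroupCursor { len: 100_000, index: 0 };
    slow.skip_group(&mut captured, Capturing::Yes); // O(n), captures each token

    assert_eq!(fast.index, slow.index);
    println!("captured {} tokens on the slow path", captured.len());
}
```

Either way the cursor ends up at the close delimiter, which is what the `debug_assert!` in the real change checks before the final `self.bump()` consumes it.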