This repository was archived by the owner on May 28, 2025. It is now read-only.

Commit b1e6dee

Merge TokenCursor::{next,next_desugared}.
And likewise for the inlined variants. I did this for simplicity, but interestingly it was a performance win as well.
1 parent 89ec75b commit b1e6dee
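For illustration, here is a minimal standalone sketch of the pattern this commit applies: two near-identical entry points are merged into a single method that takes the `desugar_doc_comments` flag as a parameter. The `Cursor` type and its string tokens below are simplified stand-ins, not rustc's actual `TokenCursor`.

struct Cursor {
    tokens: Vec<String>,
    pos: usize,
}

impl Cursor {
    // After the merge there is a single `next` that branches internally on the
    // flag, instead of separate `next` and `next_desugared` methods.
    fn next(&mut self, desugar_doc_comments: bool) -> Option<String> {
        let tok = self.tokens.get(self.pos).cloned()?;
        self.pos += 1;
        if desugar_doc_comments && tok.starts_with("///") {
            // Stand-in for the real doc-comment desugaring into `#[doc = ...]`.
            return Some(format!("#[doc = {:?}]", tok.trim_start_matches("///").trim()));
        }
        Some(tok)
    }
}

fn main() {
    let mut cursor = Cursor { tokens: vec!["/// hi".into(), "fn".into()], pos: 0 };
    // Callers no longer choose between two methods; they pass the flag instead.
    assert_eq!(cursor.next(true).as_deref(), Some("#[doc = \"hi\"]"));
    assert_eq!(cursor.next(false).as_deref(), Some("fn"));
}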

2 files changed: 71 additions & 85 deletions


compiler/rustc_parse/src/parser/attr_wrapper.rs

Lines changed: 6 additions & 11 deletions
@@ -100,21 +100,16 @@ rustc_data_structures::static_assert_size!(LazyTokenStreamImpl, 144);
 
 impl CreateTokenStream for LazyTokenStreamImpl {
     fn create_token_stream(&self) -> AttrAnnotatedTokenStream {
-        // The token produced by the final call to `{,inlined_}next` or
-        // `{,inlined_}next_desugared` was not actually consumed by the
-        // callback. The combination of chaining the initial token and using
-        // `take` produces the desired result - we produce an empty
-        // `TokenStream` if no calls were made, and omit the final token
-        // otherwise.
+        // The token produced by the final call to `{,inlined_}next` was not
+        // actually consumed by the callback. The combination of chaining the
+        // initial token and using `take` produces the desired result - we
+        // produce an empty `TokenStream` if no calls were made, and omit the
+        // final token otherwise.
         let mut cursor_snapshot = self.cursor_snapshot.clone();
         let tokens =
             std::iter::once((FlatToken::Token(self.start_token.0.clone()), self.start_token.1))
                 .chain((0..self.num_calls).map(|_| {
-                    let token = if cursor_snapshot.desugar_doc_comments {
-                        cursor_snapshot.next_desugared()
-                    } else {
-                        cursor_snapshot.next()
-                    };
+                    let token = cursor_snapshot.next(cursor_snapshot.desugar_doc_comments);
                     (FlatToken::Token(token.0), token.1)
                 }))
                 .take(self.num_calls);
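The comment in the hunk above relies on an iterator trick: chaining the initial token in front of the replayed tokens and then calling `take(num_calls)` yields an empty stream when no calls were made, and drops the never-consumed final token otherwise. Here is a self-contained sketch of that behaviour, using plain integers as stand-ins for tokens (not rustc's `FlatToken`s).

use std::iter;

// Mirrors the shape used in `create_token_stream`: prepend the start token, chain
// the replayed tokens, then `take(num_calls)` so the final, never-consumed token
// is dropped (and zero calls yields an empty stream).
fn replay(start_token: u32, replayed: &[u32], num_calls: usize) -> Vec<u32> {
    iter::once(start_token)
        .chain(replayed.iter().copied())
        .take(num_calls)
        .collect()
}

fn main() {
    assert!(replay(1, &[2, 3], 0).is_empty());     // no calls were made -> empty stream
    assert_eq!(replay(1, &[2, 3], 2), vec![1, 2]); // final token is omitted
}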

compiler/rustc_parse/src/parser/mod.rs

Lines changed: 65 additions & 74 deletions
@@ -206,9 +206,7 @@ struct TokenCursor {
     frame: TokenCursorFrame,
     stack: Vec<TokenCursorFrame>,
     desugar_doc_comments: bool,
-    // Counts the number of calls to `{,inlined_}next` or
-    // `{,inlined_}next_desugared`, depending on whether
-    // `desugar_doc_comments` is set.
+    // Counts the number of calls to `{,inlined_}next`.
     num_next_calls: usize,
     // During parsing, we may sometimes need to 'unglue' a
     // glued token into two component tokens
@@ -256,14 +254,14 @@ impl TokenCursorFrame {
 }
 
 impl TokenCursor {
-    fn next(&mut self) -> (Token, Spacing) {
-        self.inlined_next()
+    fn next(&mut self, desugar_doc_comments: bool) -> (Token, Spacing) {
+        self.inlined_next(desugar_doc_comments)
     }
 
     /// This always-inlined version should only be used on hot code paths.
     #[inline(always)]
-    fn inlined_next(&mut self) -> (Token, Spacing) {
-        loop {
+    fn inlined_next(&mut self, desugar_doc_comments: bool) -> (Token, Spacing) {
+        let (token, spacing) = loop {
             let (tree, spacing) = if !self.frame.open_delim {
                 self.frame.open_delim = true;
                 TokenTree::token(token::OpenDelim(self.frame.delim), self.frame.span.open).into()
@@ -281,77 +279,74 @@ impl TokenCursor {
 
             match tree {
                 TokenTree::Token(token) => {
-                    return (token, spacing);
+                    break (token, spacing);
                 }
                 TokenTree::Delimited(sp, delim, tts) => {
                     let frame = TokenCursorFrame::new(sp, delim, tts);
                     self.stack.push(mem::replace(&mut self.frame, frame));
                 }
             }
-        }
-    }
+        };
 
-    fn next_desugared(&mut self) -> (Token, Spacing) {
-        self.inlined_next_desugared()
-    }
+        match (desugar_doc_comments, &token) {
+            (true, &Token { kind: token::DocComment(_, attr_style, data), span }) => {
+                // Searches for the occurrences of `"#*` and returns the minimum number of `#`s
+                // required to wrap the text.
+                let mut num_of_hashes = 0;
+                let mut count = 0;
+                for ch in data.as_str().chars() {
+                    count = match ch {
+                        '"' => 1,
+                        '#' if count > 0 => count + 1,
+                        _ => 0,
+                    };
+                    num_of_hashes = cmp::max(num_of_hashes, count);
+                }
 
-    /// This always-inlined version should only be used on hot code paths.
-    #[inline(always)]
-    fn inlined_next_desugared(&mut self) -> (Token, Spacing) {
-        let (data, attr_style, sp) = match self.inlined_next() {
-            (Token { kind: token::DocComment(_, attr_style, data), span }, _) => {
-                (data, attr_style, span)
+                let delim_span = DelimSpan::from_single(span);
+                let body = TokenTree::Delimited(
+                    delim_span,
+                    token::Bracket,
+                    [
+                        TokenTree::token(token::Ident(sym::doc, false), span),
+                        TokenTree::token(token::Eq, span),
+                        TokenTree::token(
+                            TokenKind::lit(token::StrRaw(num_of_hashes), data, None),
+                            span,
+                        ),
+                    ]
+                    .iter()
+                    .cloned()
+                    .collect::<TokenStream>(),
+                );
+
+                self.stack.push(mem::replace(
+                    &mut self.frame,
+                    TokenCursorFrame::new(
+                        delim_span,
+                        token::NoDelim,
+                        if attr_style == AttrStyle::Inner {
+                            [
+                                TokenTree::token(token::Pound, span),
+                                TokenTree::token(token::Not, span),
+                                body,
+                            ]
+                            .iter()
+                            .cloned()
+                            .collect::<TokenStream>()
+                        } else {
+                            [TokenTree::token(token::Pound, span), body]
+                                .iter()
+                                .cloned()
+                                .collect::<TokenStream>()
+                        },
+                    ),
+                ));
+
+                self.next(/* desugar_doc_comments */ false)
             }
-            tok => return tok,
-        };
-
-        // Searches for the occurrences of `"#*` and returns the minimum number of `#`s
-        // required to wrap the text.
-        let mut num_of_hashes = 0;
-        let mut count = 0;
-        for ch in data.as_str().chars() {
-            count = match ch {
-                '"' => 1,
-                '#' if count > 0 => count + 1,
-                _ => 0,
-            };
-            num_of_hashes = cmp::max(num_of_hashes, count);
+            _ => (token, spacing),
         }
-
-        let delim_span = DelimSpan::from_single(sp);
-        let body = TokenTree::Delimited(
-            delim_span,
-            token::Bracket,
-            [
-                TokenTree::token(token::Ident(sym::doc, false), sp),
-                TokenTree::token(token::Eq, sp),
-                TokenTree::token(TokenKind::lit(token::StrRaw(num_of_hashes), data, None), sp),
-            ]
-            .iter()
-            .cloned()
-            .collect::<TokenStream>(),
-        );
-
-        self.stack.push(mem::replace(
-            &mut self.frame,
-            TokenCursorFrame::new(
-                delim_span,
-                token::NoDelim,
-                if attr_style == AttrStyle::Inner {
-                    [TokenTree::token(token::Pound, sp), TokenTree::token(token::Not, sp), body]
-                        .iter()
-                        .cloned()
-                        .collect::<TokenStream>()
-                } else {
-                    [TokenTree::token(token::Pound, sp), body]
-                        .iter()
-                        .cloned()
-                        .collect::<TokenStream>()
-                },
-            ),
-        ));
-
-        self.next()
     }
 }
 
@@ -1010,11 +1005,7 @@ impl<'a> Parser<'a> {
     pub fn bump(&mut self) {
         let fallback_span = self.token.span;
         loop {
-            let (mut next, spacing) = if self.desugar_doc_comments {
-                self.token_cursor.inlined_next_desugared()
-            } else {
-                self.token_cursor.inlined_next()
-            };
+            let (mut next, spacing) = self.token_cursor.inlined_next(self.desugar_doc_comments);
             self.token_cursor.num_next_calls += 1;
             // We've retrieved an token from the underlying
             // cursor, so we no longer need to worry about
@@ -1063,7 +1054,7 @@ impl<'a> Parser<'a> {
         let mut i = 0;
         let mut token = Token::dummy();
         while i < dist {
-            token = cursor.next().0;
+            token = cursor.next(/* desugar_doc_comments */ false).0;
             if matches!(
                 token.kind,
                 token::OpenDelim(token::NoDelim) | token::CloseDelim(token::NoDelim)
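The `num_of_hashes` loop that now lives in `inlined_next` computes how many `#`s are needed so the doc-comment text can be re-emitted as a raw string literal (the `r#"..."#` form of `#[doc = ...]`) without the text terminating the literal early. A standalone copy of that loop with a few worked inputs follows; the test strings are made up for illustration.

use std::cmp;

// Standalone copy of the hash-counting loop from `inlined_next` above: track the
// longest run of `"` followed by `#`s, since such a sequence inside the text is
// what could prematurely terminate the raw-string form of the doc attribute.
fn num_of_hashes(data: &str) -> u32 {
    let mut num_of_hashes: u32 = 0;
    let mut count: u32 = 0;
    for ch in data.chars() {
        count = match ch {
            '"' => 1,
            '#' if count > 0 => count + 1,
            _ => 0,
        };
        num_of_hashes = cmp::max(num_of_hashes, count);
    }
    num_of_hashes
}

fn main() {
    assert_eq!(num_of_hashes("plain text"), 0);              // no quote, no hashes needed
    assert_eq!(num_of_hashes(r#"has "quoted" text"#), 1);    // a bare quote needs one `#`
    assert_eq!(num_of_hashes(r###"ends with "##"###), 3);    // `"##` needs three `#`s
}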

0 commit comments
