Skip to content
This repository was archived by the owner on May 28, 2025. It is now read-only.

Commit ffa2e7a

Browse files
committed
Auto merge of rust-lang#77255 - Aaron1011:feature/collect-attr-tokens, r=petrochenkov
Unconditionally capture tokens for attributes. This allows us to avoid synthesizing tokens in `prepend_attr`, since we have the original tokens available. We still need to synthesize tokens when expanding `cfg_attr`, but this is an unavoidable consequence of the syntax of `cfg_attr` - the user does not supply the `#` and `[]` tokens that a `cfg_attr` expands to. This is based on PR rust-lang#77250 - this PR exposes a bug in the current `collect_tokens` implementation, which is fixed by the rewrite.
2 parents 89fdb30 + 5c7d8d0 commit ffa2e7a

File tree

19 files changed

+251
-138
lines changed

19 files changed

+251
-138
lines changed

compiler/rustc_ast/src/ast.rs

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -2423,6 +2423,7 @@ pub struct Attribute {
24232423
/// or the construct this attribute is contained within (inner).
24242424
pub style: AttrStyle,
24252425
pub span: Span,
2426+
pub tokens: Option<LazyTokenStream>,
24262427
}
24272428

24282429
#[derive(Clone, Encodable, Decodable, Debug)]

compiler/rustc_ast/src/attr/mod.rs

Lines changed: 8 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -325,7 +325,7 @@ pub fn mk_attr(style: AttrStyle, path: Path, args: MacArgs, span: Span) -> Attri
325325
}
326326

327327
pub fn mk_attr_from_item(style: AttrStyle, item: AttrItem, span: Span) -> Attribute {
328-
Attribute { kind: AttrKind::Normal(item), id: mk_attr_id(), style, span }
328+
Attribute { kind: AttrKind::Normal(item), id: mk_attr_id(), style, span, tokens: None }
329329
}
330330

331331
/// Returns an inner attribute with the given value and span.
@@ -344,7 +344,13 @@ pub fn mk_doc_comment(
344344
data: Symbol,
345345
span: Span,
346346
) -> Attribute {
347-
Attribute { kind: AttrKind::DocComment(comment_kind, data), id: mk_attr_id(), style, span }
347+
Attribute {
348+
kind: AttrKind::DocComment(comment_kind, data),
349+
id: mk_attr_id(),
350+
style,
351+
span,
352+
tokens: None,
353+
}
348354
}
349355

350356
pub fn list_contains_name(items: &[NestedMetaItem], name: Symbol) -> bool {

compiler/rustc_ast/src/mut_visit.rs

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -577,7 +577,7 @@ pub fn noop_visit_local<T: MutVisitor>(local: &mut P<Local>, vis: &mut T) {
577577
}
578578

579579
pub fn noop_visit_attribute<T: MutVisitor>(attr: &mut Attribute, vis: &mut T) {
580-
let Attribute { kind, id: _, style: _, span } = attr;
580+
let Attribute { kind, id: _, style: _, span, tokens: _ } = attr;
581581
match kind {
582582
AttrKind::Normal(AttrItem { path, args, tokens: _ }) => {
583583
vis.visit_path(path);

compiler/rustc_ast_lowering/src/expr.rs

Lines changed: 5 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -210,9 +210,9 @@ impl<'hir> LoweringContext<'_, 'hir> {
210210
ex.span = e.span;
211211
}
212212
// Merge attributes into the inner expression.
213-
let mut attrs = e.attrs.clone();
213+
let mut attrs: Vec<_> = e.attrs.iter().map(|a| self.lower_attr(a)).collect();
214214
attrs.extend::<Vec<_>>(ex.attrs.into());
215-
ex.attrs = attrs;
215+
ex.attrs = attrs.into();
216216
return ex;
217217
}
218218

@@ -1471,13 +1471,15 @@ impl<'hir> LoweringContext<'_, 'hir> {
14711471
hir::MatchSource::ForLoopDesugar,
14721472
));
14731473

1474+
let attrs: Vec<_> = e.attrs.iter().map(|a| self.lower_attr(a)).collect();
1475+
14741476
// This is effectively `{ let _result = ...; _result }`.
14751477
// The construct was introduced in #21984 and is necessary to make sure that
14761478
// temporaries in the `head` expression are dropped and do not leak to the
14771479
// surrounding scope of the `match` since the `match` is not a terminating scope.
14781480
//
14791481
// Also, add the attributes to the outer returned expr node.
1480-
self.expr_drop_temps_mut(desugared_span, match_expr, e.attrs.clone())
1482+
self.expr_drop_temps_mut(desugared_span, match_expr, attrs.into())
14811483
}
14821484

14831485
/// Desugar `ExprKind::Try` from: `<expr>?` into:

compiler/rustc_ast_lowering/src/lib.rs

Lines changed: 3 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -972,7 +972,8 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
972972
AttrKind::DocComment(comment_kind, data) => AttrKind::DocComment(comment_kind, data),
973973
};
974974

975-
Attribute { kind, id: attr.id, style: attr.style, span: attr.span }
975+
// Tokens aren't needed after macro expansion and parsing
976+
Attribute { kind, id: attr.id, style: attr.style, span: attr.span, tokens: None }
976977
}
977978

978979
fn lower_mac_args(&mut self, args: &MacArgs) -> MacArgs {
@@ -1713,7 +1714,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
17131714
pat: self.lower_pat(&l.pat),
17141715
init,
17151716
span: l.span,
1716-
attrs: l.attrs.clone(),
1717+
attrs: l.attrs.iter().map(|a| self.lower_attr(a)).collect::<Vec<_>>().into(),
17171718
source: hir::LocalSource::Normal,
17181719
},
17191720
ids,

compiler/rustc_builtin_macros/src/cmdline_attrs.rs

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -15,7 +15,7 @@ pub fn inject(mut krate: ast::Crate, parse_sess: &ParseSess, attrs: &[String]) -
1515
);
1616

1717
let start_span = parser.token.span;
18-
let AttrItem { path, args, tokens: _ } = match parser.parse_attr_item() {
18+
let AttrItem { path, args, tokens: _ } = match parser.parse_attr_item(false) {
1919
Ok(ai) => ai,
2020
Err(mut err) => {
2121
err.emit();

compiler/rustc_expand/src/config.rs

Lines changed: 34 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -3,10 +3,13 @@
33
use rustc_ast::attr::HasAttrs;
44
use rustc_ast::mut_visit::*;
55
use rustc_ast::ptr::P;
6+
use rustc_ast::token::{DelimToken, Token, TokenKind};
7+
use rustc_ast::tokenstream::{DelimSpan, LazyTokenStreamInner, Spacing, TokenStream, TokenTree};
68
use rustc_ast::{self as ast, AttrItem, Attribute, MetaItem};
79
use rustc_attr as attr;
810
use rustc_data_structures::fx::FxHashMap;
911
use rustc_data_structures::map_in_place::MapInPlace;
12+
use rustc_data_structures::sync::Lrc;
1013
use rustc_errors::{error_code, struct_span_err, Applicability, Handler};
1114
use rustc_feature::{Feature, Features, State as FeatureState};
1215
use rustc_feature::{
@@ -289,7 +292,37 @@ impl<'a> StripUnconfigured<'a> {
289292
expanded_attrs
290293
.into_iter()
291294
.flat_map(|(item, span)| {
292-
let attr = attr::mk_attr_from_item(attr.style, item, span);
295+
let orig_tokens =
296+
attr.tokens.as_ref().unwrap_or_else(|| panic!("Missing tokens for {:?}", attr));
297+
298+
// We are taking an attribute of the form `#[cfg_attr(pred, attr)]`
299+
// and producing an attribute of the form `#[attr]`. We
300+
// have captured tokens for `attr` itself, but we need to
301+
// synthesize tokens for the wrapper `#` and `[]`, which
302+
// we do below.
303+
304+
// Use the `#` in `#[cfg_attr(pred, attr)]` as the `#` token
305+
// for `attr` when we expand it to `#[attr]`
306+
let pound_token = orig_tokens.into_token_stream().trees().next().unwrap();
307+
if !matches!(pound_token, TokenTree::Token(Token { kind: TokenKind::Pound, .. })) {
308+
panic!("Bad tokens for attribute {:?}", attr);
309+
}
310+
// We don't really have a good span to use for the synthesized `[]`
311+
// in `#[attr]`, so just use the span of the `#` token.
312+
let bracket_group = TokenTree::Delimited(
313+
DelimSpan::from_single(pound_token.span()),
314+
DelimToken::Bracket,
315+
item.tokens
316+
.clone()
317+
.unwrap_or_else(|| panic!("Missing tokens for {:?}", item))
318+
.into_token_stream(),
319+
);
320+
321+
let mut attr = attr::mk_attr_from_item(attr.style, item, span);
322+
attr.tokens = Some(Lrc::new(LazyTokenStreamInner::Ready(TokenStream::new(vec![
323+
(pound_token, Spacing::Alone),
324+
(bracket_group, Spacing::Alone),
325+
]))));
293326
self.process_cfg_attr(attr)
294327
})
295328
.collect()

compiler/rustc_expand/src/expand.rs

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1785,6 +1785,7 @@ impl<'a, 'b> MutVisitor for InvocationCollector<'a, 'b> {
17851785
span: at.span,
17861786
id: at.id,
17871787
style: at.style,
1788+
tokens: None,
17881789
};
17891790
} else {
17901791
noop_visit_attribute(at, self)

compiler/rustc_interface/src/passes.rs

Lines changed: 70 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -2,8 +2,9 @@ use crate::interface::{Compiler, Result};
22
use crate::proc_macro_decls;
33
use crate::util;
44

5-
use rustc_ast::mut_visit::MutVisitor;
6-
use rustc_ast::{self as ast, visit};
5+
use rustc_ast::mut_visit::{self, MutVisitor};
6+
use rustc_ast::ptr::P;
7+
use rustc_ast::{self as ast, token, visit};
78
use rustc_codegen_ssa::back::link::emit_metadata;
89
use rustc_codegen_ssa::traits::CodegenBackend;
910
use rustc_data_structures::sync::{par_iter, Lrc, OnceCell, ParallelIterator, WorkerLocal};
@@ -36,6 +37,7 @@ use rustc_span::symbol::Symbol;
3637
use rustc_span::{FileName, RealFileName};
3738
use rustc_trait_selection::traits;
3839
use rustc_typeck as typeck;
40+
use smallvec::SmallVec;
3941
use tracing::{info, warn};
4042

4143
use rustc_serialize::json;
@@ -50,6 +52,64 @@ use std::path::PathBuf;
5052
use std::rc::Rc;
5153
use std::{env, fs, iter, mem};
5254

55+
/// Remove all `LazyTokenStream`s from an AST struct
56+
/// Normally, this is done during AST lowering. However,
57+
/// printing the AST JSON requires us to serialize
58+
/// the entire AST, and we don't want to serialize
59+
/// a `LazyTokenStream`.
60+
struct TokenStripper;
61+
impl mut_visit::MutVisitor for TokenStripper {
62+
fn flat_map_item(&mut self, mut i: P<ast::Item>) -> SmallVec<[P<ast::Item>; 1]> {
63+
i.tokens = None;
64+
mut_visit::noop_flat_map_item(i, self)
65+
}
66+
fn visit_block(&mut self, b: &mut P<ast::Block>) {
67+
b.tokens = None;
68+
mut_visit::noop_visit_block(b, self);
69+
}
70+
fn flat_map_stmt(&mut self, mut stmt: ast::Stmt) -> SmallVec<[ast::Stmt; 1]> {
71+
stmt.tokens = None;
72+
mut_visit::noop_flat_map_stmt(stmt, self)
73+
}
74+
fn visit_pat(&mut self, p: &mut P<ast::Pat>) {
75+
p.tokens = None;
76+
mut_visit::noop_visit_pat(p, self);
77+
}
78+
fn visit_ty(&mut self, ty: &mut P<ast::Ty>) {
79+
ty.tokens = None;
80+
mut_visit::noop_visit_ty(ty, self);
81+
}
82+
fn visit_attribute(&mut self, attr: &mut ast::Attribute) {
83+
attr.tokens = None;
84+
if let ast::AttrKind::Normal(ast::AttrItem { tokens, .. }) = &mut attr.kind {
85+
*tokens = None;
86+
}
87+
mut_visit::noop_visit_attribute(attr, self);
88+
}
89+
90+
fn visit_interpolated(&mut self, nt: &mut token::Nonterminal) {
91+
if let token::Nonterminal::NtMeta(meta) = nt {
92+
meta.tokens = None;
93+
}
94+
// Handles all of the other cases
95+
mut_visit::noop_visit_interpolated(nt, self);
96+
}
97+
98+
fn visit_path(&mut self, p: &mut ast::Path) {
99+
p.tokens = None;
100+
mut_visit::noop_visit_path(p, self);
101+
}
102+
fn visit_vis(&mut self, vis: &mut ast::Visibility) {
103+
vis.tokens = None;
104+
mut_visit::noop_visit_vis(vis, self);
105+
}
106+
fn visit_expr(&mut self, e: &mut P<ast::Expr>) {
107+
e.tokens = None;
108+
mut_visit::noop_visit_expr(e, self);
109+
}
110+
fn visit_mac(&mut self, _mac: &mut ast::MacCall) {}
111+
}
112+
53113
pub fn parse<'a>(sess: &'a Session, input: &Input) -> PResult<'a, ast::Crate> {
54114
let krate = sess.time("parse_crate", || match input {
55115
Input::File(file) => parse_crate_from_file(file, &sess.parse_sess),
@@ -59,6 +119,10 @@ pub fn parse<'a>(sess: &'a Session, input: &Input) -> PResult<'a, ast::Crate> {
59119
})?;
60120

61121
if sess.opts.debugging_opts.ast_json_noexpand {
122+
// Set any `token` fields to `None` before
123+
// we display the AST.
124+
let mut krate = krate.clone();
125+
TokenStripper.visit_crate(&mut krate);
62126
println!("{}", json::as_json(&krate));
63127
}
64128

@@ -379,6 +443,10 @@ fn configure_and_expand_inner<'a>(
379443
}
380444

381445
if sess.opts.debugging_opts.ast_json {
446+
// Set any `tokens` fields to `None` before
447+
// we display the AST.
448+
let mut krate = krate.clone();
449+
TokenStripper.visit_crate(&mut krate);
382450
println!("{}", json::as_json(&krate));
383451
}
384452

compiler/rustc_middle/src/ich/impls_syntax.rs

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -40,11 +40,12 @@ impl<'ctx> rustc_ast::HashStableContext for StableHashingContext<'ctx> {
4040
debug_assert!(!attr.ident().map_or(false, |ident| self.is_ignored_attr(ident.name)));
4141
debug_assert!(!attr.is_doc_comment());
4242

43-
let ast::Attribute { kind, id: _, style, span } = attr;
43+
let ast::Attribute { kind, id: _, style, span, tokens } = attr;
4444
if let ast::AttrKind::Normal(item) = kind {
4545
item.hash_stable(self, hasher);
4646
style.hash_stable(self, hasher);
4747
span.hash_stable(self, hasher);
48+
tokens.as_ref().expect_none("Tokens should have been removed during lowering!");
4849
} else {
4950
unreachable!();
5051
}

0 commit comments

Comments
 (0)