
Commit c83e217

Auto merge of #143521 - matthiaskrgr:rollup-kpv1og3, r=matthiaskrgr

Rollup of 6 pull requests

Successful merges:

- #143416 (mbe: Defer checks for `compile_error!` until reporting an unused macro rule)
- #143470 (std: sys: net: uefi: tcp4: Implement read)
- #143477 (use `is_multiple_of` and `div_ceil`)
- #143484 (distinguish the duplicate item of rpitit)
- #143493 (tidy: use --bless for tidy spellcheck instead of spellcheck:fix)
- #143504 (compiletest: print slightly more information on fs::write failure)

r? `@ghost`
`@rustbot` modify labels: rollup

2 parents e804cd4 + 097efc0 commit c83e217

38 files changed: +350, -179 lines changed


compiler/rustc_abi/src/lib.rs
Lines changed: 1 addition & 2 deletions

@@ -527,8 +527,7 @@ impl Size {
     /// not a multiple of 8.
     pub fn from_bits(bits: impl TryInto<u64>) -> Size {
         let bits = bits.try_into().ok().unwrap();
-        // Avoid potential overflow from `bits + 7`.
-        Size { raw: bits / 8 + ((bits % 8) + 7) / 8 }
+        Size { raw: bits.div_ceil(8) }
     }

     #[inline]
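
The old expression rounded `bits` up to whole bytes while dodging the overflow a naive `(bits + 7) / 8` could hit near `u64::MAX`; `div_ceil` performs the same rounding in one call. A minimal standalone sketch (not part of the diff) checking that the two formulations agree:

```rust
fn main() {
    for bits in [0u64, 1, 7, 8, 9, 63, 64, u64::MAX] {
        let old = bits / 8 + ((bits % 8) + 7) / 8; // overflow-safe manual round-up
        let new = bits.div_ceil(8); // same result, clearer intent
        assert_eq!(old, new, "mismatch for {bits}");
    }
}
```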

compiler/rustc_borrowck/src/polonius/legacy/location.rs
Lines changed: 1 addition & 1 deletion

@@ -109,6 +109,6 @@ impl PoloniusLocationTable {
 impl LocationIndex {
     fn is_start(self) -> bool {
         // even indices are start points; odd indices are mid points
-        (self.index() % 2) == 0
+        self.index().is_multiple_of(2)
     }
 }
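
As a quick reference for the pattern used throughout this rollup: on unsigned integers, `x.is_multiple_of(n)` is equivalent to `x % n == 0` for non-zero `n`, and it also handles a zero divisor without panicking (only `0` is a multiple of `0`). A small sketch, not part of the diff:

```rust
fn main() {
    let index: u64 = 112;
    assert_eq!(index % 2 == 0, index.is_multiple_of(2)); // even index => start point
    // Unlike `%`, a zero divisor does not panic:
    assert!(!5u64.is_multiple_of(0));
    assert!(0u64.is_multiple_of(0));
}
```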

compiler/rustc_codegen_llvm/src/abi.rs
Lines changed: 1 addition & 1 deletion

@@ -146,7 +146,7 @@ impl LlvmType for CastTarget {
             "total size {:?} cannot be divided into units of zero size",
             self.rest.total
         );
-        if self.rest.total.bytes() % self.rest.unit.size.bytes() != 0 {
+        if !self.rest.total.bytes().is_multiple_of(self.rest.unit.size.bytes()) {
             assert_eq!(self.rest.unit.kind, RegKind::Integer, "only int regs can be split");
         }
         self.rest.total.bytes().div_ceil(self.rest.unit.size.bytes())

compiler/rustc_codegen_llvm/src/va_arg.rs
Lines changed: 2 additions & 2 deletions

@@ -172,10 +172,10 @@ fn emit_aapcs_va_arg<'ll, 'tcx>(

     let gr_type = target_ty.is_any_ptr() || target_ty.is_integral();
     let (reg_off, reg_top, slot_size) = if gr_type {
-        let nreg = (layout.size.bytes() + 7) / 8;
+        let nreg = layout.size.bytes().div_ceil(8);
         (gr_offs, gr_top, nreg * 8)
     } else {
-        let nreg = (layout.size.bytes() + 15) / 16;
+        let nreg = layout.size.bytes().div_ceil(16);
         (vr_offs, vr_top, nreg * 16)
     };

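
The AAPCS va_arg path sizes an argument in whole 8-byte general registers or 16-byte vector registers, so the register count is simply the byte size rounded up. A standalone illustration of that arithmetic, with a plain integer standing in for `layout.size`:

```rust
fn main() {
    let size_bytes: u64 = 12;
    let gp_regs = size_bytes.div_ceil(8); // was (size + 7) / 8
    let vr_regs = size_bytes.div_ceil(16); // was (size + 15) / 16
    assert_eq!((gp_regs, gp_regs * 8), (2, 16)); // two 8-byte slots
    assert_eq!((vr_regs, vr_regs * 16), (1, 16)); // one 16-byte slot
}
```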

compiler/rustc_const_eval/src/interpret/memory.rs
Lines changed: 2 additions & 2 deletions

@@ -537,7 +537,7 @@ impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> {

         #[inline]
         fn is_offset_misaligned(offset: u64, align: Align) -> Option<Misalignment> {
-            if offset % align.bytes() == 0 {
+            if offset.is_multiple_of(align.bytes()) {
                 None
             } else {
                 // The biggest power of two through which `offset` is divisible.
@@ -1554,7 +1554,7 @@ impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> {
        // If the allocation is N-aligned, and the offset is not divisible by N,
        // then `base + offset` has a non-zero remainder after division by `N`,
        // which means `base + offset` cannot be null.
-       if offset.bytes() % info.align.bytes() != 0 {
+       if !offset.bytes().is_multiple_of(info.align.bytes()) {
            return interp_ok(false);
        }
        // We don't know enough, this might be null.
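
Both hunks are the same alignment test: an offset is aligned to `align` exactly when it is a multiple of `align.bytes()`. A simplified sketch of the first hunk, with plain `u64`s standing in for the interpreter's `Align` and `Misalignment` types (an assumption for illustration only):

```rust
fn offset_misalignment(offset: u64, align_bytes: u64) -> Option<u64> {
    if offset.is_multiple_of(align_bytes) {
        None
    } else {
        // The biggest power of two through which `offset` is divisible.
        Some(1 << offset.trailing_zeros())
    }
}

fn main() {
    assert_eq!(offset_misalignment(16, 8), None);
    assert_eq!(offset_misalignment(12, 8), Some(4)); // only 4-aligned
}
```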

compiler/rustc_expand/src/base.rs
Lines changed: 4 additions & 0 deletions

@@ -348,6 +348,10 @@ pub trait TTMacroExpander {
         span: Span,
         input: TokenStream,
     ) -> MacroExpanderResult<'cx>;
+
+    fn get_unused_rule(&self, _rule_i: usize) -> Option<(&Ident, Span)> {
+        None
+    }
 }

 pub type MacroExpanderResult<'cx> = ExpandResult<Box<dyn MacResult + 'cx>, ()>;
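
`get_unused_rule` is added with a default body so that existing `TTMacroExpander` implementations need no changes; only the `macro_rules!` expander overrides it (see the macro_rules.rs changes below). A standalone sketch of that defaulted-method pattern, with simplified stand-in types rather than the real rustc signatures:

```rust
trait Expander {
    // Defaulted hook: implementers that track rules can override it.
    fn get_unused_rule(&self, _rule_i: usize) -> Option<&str> {
        None
    }
}

struct DummyExpander;
impl Expander for DummyExpander {} // relies on the default

struct RulesExpander {
    rule_names: Vec<String>,
}
impl Expander for RulesExpander {
    fn get_unused_rule(&self, rule_i: usize) -> Option<&str> {
        self.rule_names.get(rule_i).map(String::as_str)
    }
}

fn main() {
    let exp = RulesExpander { rule_names: vec!["arm0".to_string()] };
    assert_eq!(exp.get_unused_rule(0), Some("arm0"));
    assert_eq!(DummyExpander.get_unused_rule(0), None);
}
```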

compiler/rustc_expand/src/mbe/diagnostics.rs
Lines changed: 5 additions & 5 deletions

@@ -10,7 +10,7 @@ use rustc_span::source_map::SourceMap;
 use rustc_span::{ErrorGuaranteed, Ident, Span};
 use tracing::debug;

-use super::macro_rules::{NoopTracker, parser_from_cx};
+use super::macro_rules::{MacroRule, NoopTracker, parser_from_cx};
 use crate::expand::{AstFragmentKind, parse_ast_fragment};
 use crate::mbe::macro_parser::ParseResult::*;
 use crate::mbe::macro_parser::{MatcherLoc, NamedParseResult, TtParser};
@@ -22,14 +22,14 @@ pub(super) fn failed_to_match_macro(
     def_span: Span,
     name: Ident,
     arg: TokenStream,
-    lhses: &[Vec<MatcherLoc>],
+    rules: &[MacroRule],
 ) -> (Span, ErrorGuaranteed) {
     debug!("failed to match macro");
     // An error occurred, try the expansion again, tracking the expansion closely for better
     // diagnostics.
     let mut tracker = CollectTrackerAndEmitter::new(psess.dcx(), sp);

-    let try_success_result = try_match_macro(psess, name, &arg, lhses, &mut tracker);
+    let try_success_result = try_match_macro(psess, name, &arg, rules, &mut tracker);

     if try_success_result.is_ok() {
         // Nonterminal parser recovery might turn failed matches into successful ones,
@@ -80,12 +80,12 @@ pub(super) fn failed_to_match_macro(

     // Check whether there's a missing comma in this macro call, like `println!("{}" a);`
     if let Some((arg, comma_span)) = arg.add_comma() {
-        for lhs in lhses {
+        for rule in rules {
            let parser = parser_from_cx(psess, arg.clone(), Recovery::Allowed);
            let mut tt_parser = TtParser::new(name);

            if let Success(_) =
-                tt_parser.parse_tt(&mut Cow::Borrowed(&parser), lhs, &mut NoopTracker)
+                tt_parser.parse_tt(&mut Cow::Borrowed(&parser), &rule.lhs, &mut NoopTracker)
            {
                if comma_span.is_dummy() {
                    err.note("you might be missing a comma");

compiler/rustc_expand/src/mbe/macro_rules.rs
Lines changed: 50 additions & 90 deletions

@@ -36,6 +36,7 @@ use crate::base::{
 };
 use crate::expand::{AstFragment, AstFragmentKind, ensure_complete_parse, parse_ast_fragment};
 use crate::mbe::macro_parser::{Error, ErrorReported, Failure, MatcherLoc, Success, TtParser};
+use crate::mbe::quoted::{RulePart, parse_one_tt};
 use crate::mbe::transcribe::transcribe;
 use crate::mbe::{self, KleeneOp, macro_check};

@@ -97,13 +98,18 @@ impl<'a> ParserAnyMacro<'a> {
     }
 }

+pub(super) struct MacroRule {
+    pub(super) lhs: Vec<MatcherLoc>,
+    lhs_span: Span,
+    rhs: mbe::TokenTree,
+}
+
 struct MacroRulesMacroExpander {
     node_id: NodeId,
     name: Ident,
     span: Span,
     transparency: Transparency,
-    lhses: Vec<Vec<MatcherLoc>>,
-    rhses: Vec<mbe::TokenTree>,
+    rules: Vec<MacroRule>,
 }

 impl TTMacroExpander for MacroRulesMacroExpander {
@@ -121,10 +127,15 @@ impl TTMacroExpander for MacroRulesMacroExpander {
             self.name,
             self.transparency,
             input,
-            &self.lhses,
-            &self.rhses,
+            &self.rules,
         ))
     }
+
+    fn get_unused_rule(&self, rule_i: usize) -> Option<(&Ident, Span)> {
+        // If the rhs contains an invocation like `compile_error!`, don't report it as unused.
+        let rule = &self.rules[rule_i];
+        if has_compile_error_macro(&rule.rhs) { None } else { Some((&self.name, rule.lhs_span)) }
+    }
 }

 struct DummyExpander(ErrorGuaranteed);
@@ -183,9 +194,8 @@ impl<'matcher> Tracker<'matcher> for NoopTracker {
     }
 }

-/// Expands the rules based macro defined by `lhses` and `rhses` for a given
-/// input `arg`.
-#[instrument(skip(cx, transparency, arg, lhses, rhses))]
+/// Expands the rules based macro defined by `rules` for a given input `arg`.
+#[instrument(skip(cx, transparency, arg, rules))]
 fn expand_macro<'cx>(
     cx: &'cx mut ExtCtxt<'_>,
     sp: Span,
@@ -194,8 +204,7 @@ fn expand_macro<'cx>(
     name: Ident,
     transparency: Transparency,
     arg: TokenStream,
-    lhses: &[Vec<MatcherLoc>],
-    rhses: &[mbe::TokenTree],
+    rules: &[MacroRule],
 ) -> Box<dyn MacResult + 'cx> {
     let psess = &cx.sess.psess;
     // Macros defined in the current crate have a real node id,
@@ -208,15 +217,14 @@ fn expand_macro<'cx>(
     }

     // Track nothing for the best performance.
-    let try_success_result = try_match_macro(psess, name, &arg, lhses, &mut NoopTracker);
+    let try_success_result = try_match_macro(psess, name, &arg, rules, &mut NoopTracker);

     match try_success_result {
-        Ok((i, named_matches)) => {
-            let (rhs, rhs_span): (&mbe::Delimited, DelimSpan) = match &rhses[i] {
-                mbe::TokenTree::Delimited(span, _, delimited) => (&delimited, *span),
-                _ => cx.dcx().span_bug(sp, "malformed macro rhs"),
+        Ok((i, rule, named_matches)) => {
+            let mbe::TokenTree::Delimited(rhs_span, _, ref rhs) = rule.rhs else {
+                cx.dcx().span_bug(sp, "malformed macro rhs");
             };
-            let arm_span = rhses[i].span();
+            let arm_span = rule.rhs.span();

             // rhs has holes ( `$id` and `$(...)` that need filled)
             let id = cx.current_expansion.id;
@@ -262,7 +270,7 @@ fn expand_macro<'cx>(
         Err(CanRetry::Yes) => {
             // Retry and emit a better error.
             let (span, guar) =
-                diagnostics::failed_to_match_macro(cx.psess(), sp, def_span, name, arg, lhses);
+                diagnostics::failed_to_match_macro(cx.psess(), sp, def_span, name, arg, rules);
             cx.trace_macros_diag();
             DummyResult::any(span, guar)
         }
@@ -278,14 +286,14 @@ pub(super) enum CanRetry {
 /// Try expanding the macro. Returns the index of the successful arm and its named_matches if it was successful,
 /// and nothing if it failed. On failure, it's the callers job to use `track` accordingly to record all errors
 /// correctly.
-#[instrument(level = "debug", skip(psess, arg, lhses, track), fields(tracking = %T::description()))]
+#[instrument(level = "debug", skip(psess, arg, rules, track), fields(tracking = %T::description()))]
 pub(super) fn try_match_macro<'matcher, T: Tracker<'matcher>>(
     psess: &ParseSess,
     name: Ident,
     arg: &TokenStream,
-    lhses: &'matcher [Vec<MatcherLoc>],
+    rules: &'matcher [MacroRule],
     track: &mut T,
-) -> Result<(usize, NamedMatches), CanRetry> {
+) -> Result<(usize, &'matcher MacroRule, NamedMatches), CanRetry> {
     // We create a base parser that can be used for the "black box" parts.
     // Every iteration needs a fresh copy of that parser. However, the parser
     // is not mutated on many of the iterations, particularly when dealing with
@@ -308,7 +316,7 @@ pub(super) fn try_match_macro<'matcher, T: Tracker<'matcher>>(
     let parser = parser_from_cx(psess, arg.clone(), T::recovery());
     // Try each arm's matchers.
     let mut tt_parser = TtParser::new(name);
-    for (i, lhs) in lhses.iter().enumerate() {
+    for (i, rule) in rules.iter().enumerate() {
         let _tracing_span = trace_span!("Matching arm", %i);

         // Take a snapshot of the state of pre-expansion gating at this point.
@@ -317,7 +325,7 @@ pub(super) fn try_match_macro<'matcher, T: Tracker<'matcher>>(
         // are not recorded. On the first `Success(..)`ful matcher, the spans are merged.
         let mut gated_spans_snapshot = mem::take(&mut *psess.gated_spans.spans.borrow_mut());

-        let result = tt_parser.parse_tt(&mut Cow::Borrowed(&parser), lhs, track);
+        let result = tt_parser.parse_tt(&mut Cow::Borrowed(&parser), &rule.lhs, track);

         track.after_arm(&result);

@@ -328,7 +336,7 @@ pub(super) fn try_match_macro<'matcher, T: Tracker<'matcher>>(
                 // Merge the gated spans from parsing the matcher with the preexisting ones.
                 psess.gated_spans.merge(gated_spans_snapshot);

-                return Ok((i, named_matches));
+                return Ok((i, rule, named_matches));
             }
             Failure(_) => {
                 trace!("Failed to match arm, trying the next one");
@@ -364,7 +372,7 @@ pub fn compile_declarative_macro(
     span: Span,
     node_id: NodeId,
     edition: Edition,
-) -> (SyntaxExtension, Vec<(usize, Span)>) {
+) -> (SyntaxExtension, usize) {
     let mk_syn_ext = |expander| {
         SyntaxExtension::new(
             sess,
@@ -377,7 +385,7 @@ pub fn compile_declarative_macro(
             node_id != DUMMY_NODE_ID,
         )
     };
-    let dummy_syn_ext = |guar| (mk_syn_ext(Arc::new(DummyExpander(guar))), Vec::new());
+    let dummy_syn_ext = |guar| (mk_syn_ext(Arc::new(DummyExpander(guar))), 0);

     let macro_rules = macro_def.macro_rules;
     let exp_sep = if macro_rules { exp!(Semi) } else { exp!(Comma) };
@@ -389,21 +397,11 @@ pub fn compile_declarative_macro(
     let mut guar = None;
     let mut check_emission = |ret: Result<(), ErrorGuaranteed>| guar = guar.or(ret.err());

-    let mut lhses = Vec::new();
-    let mut rhses = Vec::new();
+    let mut rules = Vec::new();

     while p.token != token::Eof {
         let lhs_tt = p.parse_token_tree();
-        let lhs_tt = mbe::quoted::parse(
-            &TokenStream::new(vec![lhs_tt]),
-            true, // LHS
-            sess,
-            node_id,
-            features,
-            edition,
-        )
-        .pop()
-        .unwrap();
+        let lhs_tt = parse_one_tt(lhs_tt, RulePart::Pattern, sess, node_id, features, edition);
         // We don't handle errors here, the driver will abort after parsing/expansion. We can
         // report every error in every macro this way.
         check_emission(check_lhs_nt_follows(sess, node_id, &lhs_tt));
@@ -421,20 +419,18 @@ pub fn compile_declarative_macro(
             return dummy_syn_ext(guar);
         }
         let rhs_tt = p.parse_token_tree();
-        let rhs_tt = mbe::quoted::parse(
-            &TokenStream::new(vec![rhs_tt]),
-            false, // RHS
-            sess,
-            node_id,
-            features,
-            edition,
-        )
-        .pop()
-        .unwrap();
+        let rhs_tt = parse_one_tt(rhs_tt, RulePart::Body, sess, node_id, features, edition);
         check_emission(check_rhs(sess, &rhs_tt));
         check_emission(macro_check::check_meta_variables(&sess.psess, node_id, &lhs_tt, &rhs_tt));
-        lhses.push(lhs_tt);
-        rhses.push(rhs_tt);
+        let lhs_span = lhs_tt.span();
+        // Convert the lhs into `MatcherLoc` form, which is better for doing the
+        // actual matching.
+        let lhs = if let mbe::TokenTree::Delimited(.., delimited) = lhs_tt {
+            mbe::macro_parser::compute_locs(&delimited.tts)
+        } else {
+            return dummy_syn_ext(guar.unwrap());
+        };
+        rules.push(MacroRule { lhs, lhs_span, rhs: rhs_tt });
         if p.token == token::Eof {
             break;
         }
@@ -443,7 +439,7 @@ pub fn compile_declarative_macro(
         }
     }

-    if lhses.is_empty() {
+    if rules.is_empty() {
        let guar = sess.dcx().span_err(span, "macros must contain at least one rule");
        return dummy_syn_ext(guar);
    }
@@ -457,48 +453,12 @@ pub fn compile_declarative_macro(
        return dummy_syn_ext(guar);
    }

-    // Compute the spans of the macro rules for unused rule linting.
-    // Also, we are only interested in non-foreign macros.
-    let rule_spans = if node_id != DUMMY_NODE_ID {
-        lhses
-            .iter()
-            .zip(rhses.iter())
-            .enumerate()
-            // If the rhs contains an invocation like compile_error!,
-            // don't consider the rule for the unused rule lint.
-            .filter(|(_idx, (_lhs, rhs))| !has_compile_error_macro(rhs))
-            // We only take the span of the lhs here,
-            // so that the spans of created warnings are smaller.
-            .map(|(idx, (lhs, _rhs))| (idx, lhs.span()))
-            .collect::<Vec<_>>()
-    } else {
-        Vec::new()
-    };
+    // Return the number of rules for unused rule linting, if this is a local macro.
+    let nrules = if node_id != DUMMY_NODE_ID { rules.len() } else { 0 };

-    // Convert the lhses into `MatcherLoc` form, which is better for doing the
-    // actual matching.
-    let lhses = lhses
-        .iter()
-        .map(|lhs| {
-            // Ignore the delimiters around the matcher.
-            match lhs {
-                mbe::TokenTree::Delimited(.., delimited) => {
-                    mbe::macro_parser::compute_locs(&delimited.tts)
-                }
-                _ => sess.dcx().span_bug(span, "malformed macro lhs"),
-            }
-        })
-        .collect();
-
-    let expander = Arc::new(MacroRulesMacroExpander {
-        name: ident,
-        span,
-        node_id,
-        transparency,
-        lhses,
-        rhses,
-    });
-    (mk_syn_ext(expander), rule_spans)
+    let expander =
+        Arc::new(MacroRulesMacroExpander { name: ident, span, node_id, transparency, rules });
+    (mk_syn_ext(expander), nrules)
 }

 fn check_lhs_nt_follows(
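
The net effect of this file's changes: the parallel `lhses`/`rhses` vectors become a single `Vec<MacroRule>`, so a successful match can hand back the matched rule by reference and the unused-rule lint only needs the rule count. A much-simplified sketch of that shape (string fields stand in for the real matcher and token-tree types):

```rust
struct MacroRule {
    lhs: String,
    rhs: String,
}

fn try_match<'a>(rules: &'a [MacroRule], input: &str) -> Option<(usize, &'a MacroRule)> {
    // Try each arm in order and hand back the whole matching rule.
    rules.iter().enumerate().find(|(_, rule)| rule.lhs == input)
}

fn main() {
    let rules = vec![
        MacroRule { lhs: "()".to_string(), rhs: "{ 1 }".to_string() },
        MacroRule { lhs: "($x:expr)".to_string(), rhs: "{ $x }".to_string() },
    ];
    let (i, rule) = try_match(&rules, "($x:expr)").expect("second arm should match");
    assert_eq!((i, rule.rhs.as_str()), (1, "{ $x }"));
}
```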
