Skip to content

Commit 2f9c9ce

Browse files
committed
Auto merge of #143766 - matthiaskrgr:rollup-0x7t69s, r=matthiaskrgr
Rollup of 8 pull requests Successful merges: - #142391 (rust: library: Add `setsid` method to `CommandExt` trait) - #143302 (`tests/ui`: A New Order [27/N]) - #143303 (`tests/ui`: A New Order [28/28] FINAL PART) - #143568 (std: sys: net: uefi: tcp4: Add timeout support) - #143611 (Mention more APIs in `ParseIntError` docs) - #143661 (chore: Improve how the other suggestions message gets rendered) - #143708 (fix: Include frontmatter in -Zunpretty output) - #143718 (Make UB transmutes really UB in LLVM) r? `@ghost` `@rustbot` modify labels: rollup try-job: i686-gnu-nopt-1 try-job: test-various
2 parents 9535fee + e43481e commit 2f9c9ce

File tree

111 files changed

+1164
-477
lines changed

Some content is hidden

Large commits have some content hidden by default. Use the search box below for content that may be hidden.

111 files changed

+1164
-477
lines changed

compiler/rustc_ast_pretty/src/pprust/state.rs

Lines changed: 9 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -120,7 +120,7 @@ fn gather_comments(sm: &SourceMap, path: FileName, src: String) -> Vec<Comment>
120120
pos += shebang_len;
121121
}
122122

123-
for token in rustc_lexer::tokenize(&text[pos..]) {
123+
for token in rustc_lexer::tokenize(&text[pos..], rustc_lexer::FrontmatterAllowed::Yes) {
124124
let token_text = &text[pos..pos + token.len as usize];
125125
match token.kind {
126126
rustc_lexer::TokenKind::Whitespace => {
@@ -171,6 +171,14 @@ fn gather_comments(sm: &SourceMap, path: FileName, src: String) -> Vec<Comment>
171171
})
172172
}
173173
}
174+
rustc_lexer::TokenKind::Frontmatter { .. } => {
175+
code_to_the_left = false;
176+
comments.push(Comment {
177+
style: CommentStyle::Isolated,
178+
lines: vec![token_text.to_string()],
179+
pos: start_bpos + BytePos(pos as u32),
180+
});
181+
}
174182
_ => {
175183
code_to_the_left = true;
176184
}

compiler/rustc_codegen_ssa/src/mir/rvalue.rs

Lines changed: 4 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -207,9 +207,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
207207
{
208208
// These cases are all UB to actually hit, so don't emit code for them.
209209
// (The size mismatches are reachable via `transmute_unchecked`.)
210-
// We can't use unreachable because that's a terminator, and we
211-
// need something that can be in the middle of a basic block.
212-
bx.assume(bx.cx().const_bool(false))
210+
bx.unreachable_nonterminator();
213211
} else {
214212
// Since in this path we have a place anyway, we can store or copy to it,
215213
// making sure we use the destination place's alignment even if the
@@ -236,14 +234,10 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
236234
|| operand.layout.is_uninhabited()
237235
|| cast.is_uninhabited()
238236
{
239-
if !operand.layout.is_uninhabited() {
240-
// Since this is known statically and the input could have existed
241-
// without already having hit UB, might as well trap for it.
242-
bx.abort();
243-
}
237+
bx.unreachable_nonterminator();
244238

245-
// Because this transmute is UB, return something easy to generate,
246-
// since it's fine that later uses of the value are probably UB.
239+
// We still need to return a value of the appropriate type, but
240+
// it's already UB so do the easiest thing available.
247241
return OperandValue::poison(bx, cast);
248242
}
249243

compiler/rustc_codegen_ssa/src/traits/builder.rs

Lines changed: 10 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -136,6 +136,16 @@ pub trait BuilderMethods<'a, 'tcx>:
136136
) -> Self::Value;
137137
fn unreachable(&mut self);
138138

139+
/// Like [`Self::unreachable`], but for use in the middle of a basic block.
140+
fn unreachable_nonterminator(&mut self) {
141+
// This is the preferred LLVM incantation for this per
142+
// https://llvm.org/docs/Frontend/PerformanceTips.html#other-things-to-consider
143+
// Other backends may override if they have a better way.
144+
let const_true = self.cx().const_bool(true);
145+
let poison_ptr = self.const_poison(self.cx().type_ptr());
146+
self.store(const_true, poison_ptr, Align::ONE);
147+
}
148+
139149
fn add(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value;
140150
fn fadd(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value;
141151
fn fadd_fast(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value;

compiler/rustc_errors/src/emitter.rs

Lines changed: 7 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -2446,17 +2446,22 @@ impl HumanEmitter {
24462446
| DisplaySuggestion::Underline => row_num - 1,
24472447
DisplaySuggestion::None => row_num,
24482448
};
2449-
self.draw_col_separator_end(&mut buffer, row, max_line_num_len + 1);
2449+
if other_suggestions > 0 {
2450+
self.draw_col_separator_no_space(&mut buffer, row, max_line_num_len + 1);
2451+
} else {
2452+
self.draw_col_separator_end(&mut buffer, row, max_line_num_len + 1);
2453+
}
24502454
row_num = row + 1;
24512455
}
24522456
}
24532457
if other_suggestions > 0 {
2458+
self.draw_note_separator(&mut buffer, row_num, max_line_num_len + 1, false);
24542459
let msg = format!(
24552460
"and {} other candidate{}",
24562461
other_suggestions,
24572462
pluralize!(other_suggestions)
24582463
);
2459-
buffer.puts(row_num, max_line_num_len + 3, &msg, Style::NoStyle);
2464+
buffer.append(row_num, &msg, Style::NoStyle);
24602465
}
24612466

24622467
emit_to_destination(&buffer.render(), level, &mut self.dst, self.short_message)?;

compiler/rustc_lexer/src/lib.rs

Lines changed: 19 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -273,14 +273,15 @@ pub fn strip_shebang(input: &str) -> Option<usize> {
273273
if let Some(input_tail) = input.strip_prefix("#!") {
274274
// Ok, this is a shebang but if the next non-whitespace token is `[`,
275275
// then it may be valid Rust code, so consider it Rust code.
276-
let next_non_whitespace_token = tokenize(input_tail).map(|tok| tok.kind).find(|tok| {
277-
!matches!(
278-
tok,
279-
TokenKind::Whitespace
280-
| TokenKind::LineComment { doc_style: None }
281-
| TokenKind::BlockComment { doc_style: None, .. }
282-
)
283-
});
276+
let next_non_whitespace_token =
277+
tokenize(input_tail, FrontmatterAllowed::No).map(|tok| tok.kind).find(|tok| {
278+
!matches!(
279+
tok,
280+
TokenKind::Whitespace
281+
| TokenKind::LineComment { doc_style: None }
282+
| TokenKind::BlockComment { doc_style: None, .. }
283+
)
284+
});
284285
if next_non_whitespace_token != Some(TokenKind::OpenBracket) {
285286
// No other choice than to consider this a shebang.
286287
return Some(2 + input_tail.lines().next().unwrap_or_default().len());
@@ -303,8 +304,16 @@ pub fn validate_raw_str(input: &str, prefix_len: u32) -> Result<(), RawStrError>
303304
}
304305

305306
/// Creates an iterator that produces tokens from the input string.
306-
pub fn tokenize(input: &str) -> impl Iterator<Item = Token> {
307-
let mut cursor = Cursor::new(input, FrontmatterAllowed::No);
307+
///
308+
/// When parsing a full Rust document,
309+
/// first [`strip_shebang`] and then allow frontmatters with [`FrontmatterAllowed::Yes`].
310+
///
311+
/// When tokenizing a slice of a document, be sure to disallow frontmatters with [`FrontmatterAllowed::No`]
312+
pub fn tokenize(
313+
input: &str,
314+
frontmatter_allowed: FrontmatterAllowed,
315+
) -> impl Iterator<Item = Token> {
316+
let mut cursor = Cursor::new(input, frontmatter_allowed);
308317
std::iter::from_fn(move || {
309318
let token = cursor.advance_token();
310319
if token.kind != TokenKind::Eof { Some(token) } else { None }

compiler/rustc_lexer/src/tests.rs

Lines changed: 85 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -124,15 +124,17 @@ fn test_valid_shebang() {
124124
assert_eq!(strip_shebang(input), None);
125125
}
126126

127-
fn check_lexing(src: &str, expect: Expect) {
128-
let actual: String = tokenize(src).map(|token| format!("{:?}\n", token)).collect();
127+
fn check_lexing(src: &str, frontmatter_allowed: FrontmatterAllowed, expect: Expect) {
128+
let actual: String =
129+
tokenize(src, frontmatter_allowed).map(|token| format!("{:?}\n", token)).collect();
129130
expect.assert_eq(&actual)
130131
}
131132

132133
#[test]
133134
fn smoke_test() {
134135
check_lexing(
135136
"/* my source file */ fn main() { println!(\"zebra\"); }\n",
137+
FrontmatterAllowed::No,
136138
expect![[r#"
137139
Token { kind: BlockComment { doc_style: None, terminated: true }, len: 20 }
138140
Token { kind: Whitespace, len: 1 }
@@ -171,6 +173,7 @@ fn comment_flavors() {
171173
/** outer doc block */
172174
/*! inner doc block */
173175
",
176+
FrontmatterAllowed::No,
174177
expect![[r#"
175178
Token { kind: Whitespace, len: 1 }
176179
Token { kind: LineComment { doc_style: None }, len: 7 }
@@ -199,6 +202,7 @@ fn comment_flavors() {
199202
fn nested_block_comments() {
200203
check_lexing(
201204
"/* /* */ */'a'",
205+
FrontmatterAllowed::No,
202206
expect![[r#"
203207
Token { kind: BlockComment { doc_style: None, terminated: true }, len: 11 }
204208
Token { kind: Literal { kind: Char { terminated: true }, suffix_start: 3 }, len: 3 }
@@ -210,6 +214,7 @@ fn nested_block_comments() {
210214
fn characters() {
211215
check_lexing(
212216
"'a' ' ' '\\n'",
217+
FrontmatterAllowed::No,
213218
expect![[r#"
214219
Token { kind: Literal { kind: Char { terminated: true }, suffix_start: 3 }, len: 3 }
215220
Token { kind: Whitespace, len: 1 }
@@ -224,6 +229,7 @@ fn characters() {
224229
fn lifetime() {
225230
check_lexing(
226231
"'abc",
232+
FrontmatterAllowed::No,
227233
expect![[r#"
228234
Token { kind: Lifetime { starts_with_number: false }, len: 4 }
229235
"#]],
@@ -234,6 +240,7 @@ fn lifetime() {
234240
fn raw_string() {
235241
check_lexing(
236242
"r###\"\"#a\\b\x00c\"\"###",
243+
FrontmatterAllowed::No,
237244
expect![[r#"
238245
Token { kind: Literal { kind: RawStr { n_hashes: Some(3) }, suffix_start: 17 }, len: 17 }
239246
"#]],
@@ -257,6 +264,7 @@ b"a"
257264
r###"raw"###suffix
258265
br###"raw"###suffix
259266
"####,
267+
FrontmatterAllowed::No,
260268
expect![[r#"
261269
Token { kind: Whitespace, len: 1 }
262270
Token { kind: Literal { kind: Char { terminated: true }, suffix_start: 3 }, len: 3 }
@@ -286,3 +294,78 @@ br###"raw"###suffix
286294
"#]],
287295
)
288296
}
297+
298+
#[test]
299+
fn frontmatter_allowed() {
300+
check_lexing(
301+
r#"
302+
---cargo
303+
[dependencies]
304+
clap = "4"
305+
---
306+
307+
fn main() {}
308+
"#,
309+
FrontmatterAllowed::Yes,
310+
expect![[r#"
311+
Token { kind: Whitespace, len: 1 }
312+
Token { kind: Frontmatter { has_invalid_preceding_whitespace: false, invalid_infostring: false }, len: 38 }
313+
Token { kind: Whitespace, len: 2 }
314+
Token { kind: Ident, len: 2 }
315+
Token { kind: Whitespace, len: 1 }
316+
Token { kind: Ident, len: 4 }
317+
Token { kind: OpenParen, len: 1 }
318+
Token { kind: CloseParen, len: 1 }
319+
Token { kind: Whitespace, len: 1 }
320+
Token { kind: OpenBrace, len: 1 }
321+
Token { kind: CloseBrace, len: 1 }
322+
Token { kind: Whitespace, len: 1 }
323+
"#]],
324+
)
325+
}
326+
327+
#[test]
328+
fn frontmatter_disallowed() {
329+
check_lexing(
330+
r#"
331+
---cargo
332+
[dependencies]
333+
clap = "4"
334+
---
335+
336+
fn main() {}
337+
"#,
338+
FrontmatterAllowed::No,
339+
expect![[r#"
340+
Token { kind: Whitespace, len: 1 }
341+
Token { kind: Minus, len: 1 }
342+
Token { kind: Minus, len: 1 }
343+
Token { kind: Minus, len: 1 }
344+
Token { kind: Ident, len: 5 }
345+
Token { kind: Whitespace, len: 1 }
346+
Token { kind: OpenBracket, len: 1 }
347+
Token { kind: Ident, len: 12 }
348+
Token { kind: CloseBracket, len: 1 }
349+
Token { kind: Whitespace, len: 1 }
350+
Token { kind: Ident, len: 4 }
351+
Token { kind: Whitespace, len: 1 }
352+
Token { kind: Eq, len: 1 }
353+
Token { kind: Whitespace, len: 1 }
354+
Token { kind: Literal { kind: Str { terminated: true }, suffix_start: 3 }, len: 3 }
355+
Token { kind: Whitespace, len: 1 }
356+
Token { kind: Minus, len: 1 }
357+
Token { kind: Minus, len: 1 }
358+
Token { kind: Minus, len: 1 }
359+
Token { kind: Whitespace, len: 2 }
360+
Token { kind: Ident, len: 2 }
361+
Token { kind: Whitespace, len: 1 }
362+
Token { kind: Ident, len: 4 }
363+
Token { kind: OpenParen, len: 1 }
364+
Token { kind: CloseParen, len: 1 }
365+
Token { kind: Whitespace, len: 1 }
366+
Token { kind: OpenBrace, len: 1 }
367+
Token { kind: CloseBrace, len: 1 }
368+
Token { kind: Whitespace, len: 1 }
369+
"#]],
370+
)
371+
}

library/core/src/num/error.rs

Lines changed: 5 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -45,8 +45,11 @@ impl From<!> for TryFromIntError {
4545

4646
/// An error which can be returned when parsing an integer.
4747
///
48-
/// This error is used as the error type for the `from_str_radix()` functions
49-
/// on the primitive integer types, such as [`i8::from_str_radix`].
48+
/// For example, this error is returned by the `from_str_radix()` functions
49+
/// on the primitive integer types (such as [`i8::from_str_radix`])
50+
/// and is used as the error type in their [`FromStr`] implementations.
51+
///
52+
/// [`FromStr`]: crate::str::FromStr
5053
///
5154
/// # Potential causes
5255
///

library/std/src/os/unix/process.rs

Lines changed: 8 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -210,6 +210,9 @@ pub trait CommandExt: Sealed {
210210
/// intentional difference from the underlying `chroot` system call.)
211211
#[unstable(feature = "process_chroot", issue = "141298")]
212212
fn chroot<P: AsRef<Path>>(&mut self, dir: P) -> &mut process::Command;
213+
214+
#[unstable(feature = "process_setsid", issue = "105376")]
215+
fn setsid(&mut self, setsid: bool) -> &mut process::Command;
213216
}
214217

215218
#[stable(feature = "rust1", since = "1.0.0")]
@@ -260,6 +263,11 @@ impl CommandExt for process::Command {
260263
self.as_inner_mut().chroot(dir.as_ref());
261264
self
262265
}
266+
267+
fn setsid(&mut self, setsid: bool) -> &mut process::Command {
268+
self.as_inner_mut().setsid(setsid);
269+
self
270+
}
263271
}
264272

265273
/// Unix-specific extensions to [`process::ExitStatus`] and

0 commit comments

Comments (0)