From c5425ce7cda5e4cc1c4e43c206ac54e13860ecdf Mon Sep 17 00:00:00 2001 From: Jason Newcomb Date: Sun, 20 Apr 2025 18:54:27 -0400 Subject: [PATCH 1/2] Add data structures used by `redundant_clone` --- .github/workflows/clippy_mq.yml | 4 + .github/workflows/clippy_pr.yml | 4 + clippy_data_structures/Cargo.toml | 12 + clippy_data_structures/src/bit_set_2d.rs | 579 ++++++++++ clippy_data_structures/src/bit_slice.rs | 538 +++++++++ clippy_data_structures/src/lib.rs | 86 ++ clippy_data_structures/src/range.rs | 1018 +++++++++++++++++ clippy_data_structures/src/slice_set.rs | 195 ++++ clippy_data_structures/src/sorted.rs | 106 ++ clippy_data_structures/src/traits.rs | 92 ++ clippy_data_structures/tests/bit_slice.rs | 120 ++ clippy_data_structures/tests/bit_slice_2d.rs | 221 ++++ .../tests/growable_bit_set_2d.rs | 490 ++++++++ tests/dogfood.rs | 1 + 14 files changed, 3466 insertions(+) create mode 100644 clippy_data_structures/Cargo.toml create mode 100644 clippy_data_structures/src/bit_set_2d.rs create mode 100644 clippy_data_structures/src/bit_slice.rs create mode 100644 clippy_data_structures/src/lib.rs create mode 100644 clippy_data_structures/src/range.rs create mode 100644 clippy_data_structures/src/slice_set.rs create mode 100644 clippy_data_structures/src/sorted.rs create mode 100644 clippy_data_structures/src/traits.rs create mode 100644 clippy_data_structures/tests/bit_slice.rs create mode 100644 clippy_data_structures/tests/bit_slice_2d.rs create mode 100644 clippy_data_structures/tests/growable_bit_set_2d.rs diff --git a/.github/workflows/clippy_mq.yml b/.github/workflows/clippy_mq.yml index 07d5a08304e8..6ba91cd23566 100644 --- a/.github/workflows/clippy_mq.yml +++ b/.github/workflows/clippy_mq.yml @@ -65,6 +65,10 @@ jobs: if: matrix.host != 'x86_64-unknown-linux-gnu' run: cargo test --features internal -- --skip dogfood + - name: Test clippy_data_structures + run: cargo test + working-directory: clippy_data_structures + - name: Test clippy_lints run: cargo test working-directory: clippy_lints diff --git a/.github/workflows/clippy_pr.yml b/.github/workflows/clippy_pr.yml index 880ebd6e5d5c..88aca34df4fe 100644 --- a/.github/workflows/clippy_pr.yml +++ b/.github/workflows/clippy_pr.yml @@ -41,6 +41,10 @@ jobs: - name: Test run: cargo test --features internal + - name: Test clippy_data_structures + run: cargo test + working-directory: clippy_data_structures + - name: Test clippy_lints run: cargo test working-directory: clippy_lints diff --git a/clippy_data_structures/Cargo.toml b/clippy_data_structures/Cargo.toml new file mode 100644 index 000000000000..1826e9ce31c7 --- /dev/null +++ b/clippy_data_structures/Cargo.toml @@ -0,0 +1,12 @@ +[package] +name = "clippy_data_structures" +version = "0.0.1" +edition = "2021" + +[dependencies] +arrayvec = { version = "0.7", default-features = false} +smallvec = { version = "1.8.1", features = ["union", "may_dangle", "const_new"] } + +[package.metadata.rust-analyzer] +# This package uses #[feature(rustc_private)] +rustc_private = true diff --git a/clippy_data_structures/src/bit_set_2d.rs b/clippy_data_structures/src/bit_set_2d.rs new file mode 100644 index 000000000000..8cc8bbd0cbfa --- /dev/null +++ b/clippy_data_structures/src/bit_set_2d.rs @@ -0,0 +1,579 @@ +use crate::bit_slice::{BitSlice, Word, final_mask_for_size, word_count_from_bits}; +use crate::range::{self, Len as _, LimitExplicitBounds, SplitAt as _, SubtractFromEdge, WithStride}; +use core::iter; +use core::marker::PhantomData; +use rustc_arena::DroplessArena; +use rustc_index::{Idx, 
IntoSliceIdx}; + +/// A reference to a two-dimensional bit set. +/// +/// This is represented as a dense array of words stored in row major order with each row aligned to +/// the start of a word. +pub struct BitSlice2d<'a, R, C> { + words: &'a mut [Word], + rows: u32, + columns: u32, + row_stride: u32, + phantom: PhantomData<(R, C)>, +} +impl<'a, R, C> BitSlice2d<'a, R, C> { + /// Interprets `words` as a two-dimensional bit set of the given size. + /// + /// The length of the given slice must match the number of words required to store a bit set + /// with the given dimensions. + #[inline] + #[must_use] + #[expect(clippy::cast_possible_truncation)] + pub fn from_mut_words(words: &'a mut [Word], rows: u32, columns: u32) -> Self { + let row_stride = word_count_from_bits(columns as usize); + debug_assert_eq!(Some(words.len()), row_stride.checked_mul(rows as usize)); + Self { + words, + rows, + columns, + row_stride: row_stride as u32, + phantom: PhantomData, + } + } + + /// Allocates a new empty two-dimensional bit set of the given size. + /// + /// # Panics + /// Panics if `rows * columns` overflows a usize. + #[inline] + #[must_use] + #[expect(clippy::cast_possible_truncation)] + pub fn empty_arena(arena: &'a DroplessArena, rows: u32, columns: u32) -> Self { + let row_stride = word_count_from_bits(columns as usize); + Self { + words: arena.alloc_from_iter(iter::repeat_n(0, row_stride.checked_mul(rows as usize).unwrap())), + rows, + columns, + row_stride: row_stride as u32, + phantom: PhantomData, + } + } + + /// Gets the number of rows. + #[inline] + #[must_use] + pub const fn row_len(&self) -> u32 { + self.rows + } + + /// Gets the number of columns. + #[inline] + #[must_use] + pub const fn column_len(&self) -> u32 { + self.columns + } + + /// Get the backing slice of words. + #[inline] + #[must_use] + pub const fn words(&self) -> &[Word] { + self.words + } + + /// Get the backing slice of words. + #[inline] + #[must_use] + pub fn words_mut(&mut self) -> &mut [Word] { + self.words + } + + /// Creates an iterator over the given rows. + /// + /// # Panics + /// Panics if the range exceeds the number of rows. + #[inline] + #[must_use] + #[track_caller] + pub fn iter_rows( + &self, + range: impl IntoSliceIdx, + ) -> impl ExactSizeIterator> + Clone { + self.words[range.into_slice_idx().with_stride(self.row_stride)] + .chunks_exact(self.row_stride as usize) + .map(|words| BitSlice::from_words(words)) + } + + /// Creates an iterator over the given rows. + /// + /// # Panics + /// Panics if the range exceeds the number of rows. + #[inline] + #[must_use] + #[track_caller] + pub fn iter_mut_rows( + &mut self, + range: impl IntoSliceIdx, + ) -> impl ExactSizeIterator> { + self.words[range.into_slice_idx().with_stride(self.row_stride)] + .chunks_exact_mut(self.row_stride as usize) + .map(|words| BitSlice::from_words_mut(words)) + } + + /// Checks if the set is empty. + #[inline] + #[must_use] + pub fn is_empty(&self) -> bool { + self.words.iter().all(|&x| x == 0) + } + + /// Counts the number of elements in the set. + #[inline] + #[must_use] + pub fn count(&self) -> usize { + self.words.iter().map(|&x| x.count_ones() as usize).sum() + } + + /// Remove all elements from the set. + #[inline] + pub fn clear(&mut self) { + self.words.fill(0); + } + + /// Inserts all elements into the set. + #[inline] + pub fn insert_all(&mut self) { + self.words.fill(!0); + let mask = final_mask_for_size(self.columns as usize); + for row in self.iter_mut_rows(..) 
{
+            row.mask_final_word(mask);
+        }
+    }
+
+    /// Performs a union of two sets storing the result in `self`. Returns `true` if `self` has
+    /// changed.
+    ///
+    /// # Panics
+    /// Panics if the sets contain a different number of either rows or columns.
+    pub fn union(&mut self, other: &BitSlice2d<'_, R, C>) -> bool {
+        assert_eq!(self.rows, other.rows);
+        assert_eq!(self.columns, other.columns);
+        self.words.iter_mut().zip(&*other.words).fold(false, |res, (dst, src)| {
+            let prev = *dst;
+            *dst |= *src;
+            res || prev != *dst
+        })
+    }
+
+    /// Performs a subtraction of `other` from `self` storing the result in `self`. Returns `true` if
+    /// `self` has changed.
+    ///
+    /// # Panics
+    /// Panics if the sets contain a different number of either rows or columns.
+    pub fn subtract(&mut self, other: &BitSlice2d<'_, R, C>) -> bool {
+        assert_eq!(self.rows, other.rows);
+        assert_eq!(self.columns, other.columns);
+        self.words.iter_mut().zip(&*other.words).fold(false, |res, (dst, src)| {
+            let prev = *dst;
+            *dst &= !*src;
+            res || prev != *dst
+        })
+    }
+
+    /// Performs an intersection of two sets storing the result in `self`. Returns `true` if `self`
+    /// has changed.
+    ///
+    /// # Panics
+    /// Panics if the sets contain a different number of either rows or columns.
+    pub fn intersect(&mut self, other: &BitSlice2d<'_, R, C>) -> bool {
+        assert_eq!(self.rows, other.rows);
+        assert_eq!(self.columns, other.columns);
+        self.words.iter_mut().zip(&*other.words).fold(false, |res, (dst, src)| {
+            let prev = *dst;
+            *dst &= *src;
+            res || prev != *dst
+        })
+    }
+}
+impl<R: Idx, C> BitSlice2d<'_, R, C> {
+    /// Creates an iterator which enumerates all rows.
+    #[inline]
+    #[must_use]
+    pub fn enumerate_rows(&self) -> impl ExactSizeIterator<Item = (R, &BitSlice<C>)> + Clone {
+        self.words
+            .chunks_exact(self.row_stride as usize)
+            .map(|words| BitSlice::from_words(words))
+            .enumerate()
+            .map(|(i, row)| (R::new(i), row))
+    }
+
+    /// Creates an iterator which enumerates all rows.
+    #[inline]
+    pub fn enumerate_rows_mut(&mut self) -> impl ExactSizeIterator<Item = (R, &mut BitSlice<C>)> {
+        self.words
+            .chunks_exact_mut(self.row_stride as usize)
+            .map(|words| BitSlice::from_words_mut(words))
+            .enumerate()
+            .map(|(i, row)| (R::new(i), row))
+    }
+
+    /// Gets a reference to the given row.
+    ///
+    /// # Panics
+    /// Panics if the row is greater than or equal to the number of rows.
+    #[inline]
+    #[track_caller]
+    pub fn row(&self, row: R) -> &BitSlice<C> {
+        assert!(row.index() < self.rows as usize);
+        let start = self.row_stride as usize * row.index();
+        BitSlice::from_words(&self.words[start..start + self.row_stride as usize])
+    }
+
+    /// Gets a reference to the given row.
+    ///
+    /// # Panics
+    /// Panics if the row is greater than or equal to the number of rows.
+    #[inline]
+    #[track_caller]
+    pub fn row_mut(&mut self, row: R) -> &mut BitSlice<C> {
+        assert!(row.index() < self.rows as usize);
+        let start = self.row_stride as usize * row.index();
+        BitSlice::from_words_mut(&mut self.words[start..start + self.row_stride as usize])
+    }
+
+    /// Copies a range of rows to another part of the bitset.
+    ///
+    /// # Panics
+    /// Panics if either the source or destination range exceeds the number of rows.
+    #[inline]
+    #[track_caller]
+    pub fn copy_rows(&mut self, src: impl IntoSliceIdx, dst: R) {
+        let src = src.into_slice_idx().with_stride(self.row_stride);
+        self.words.copy_within(src, dst.index() * self.row_stride as usize);
+    }
+
+    /// Moves a range of rows to another part of the bitset leaving empty rows behind.
+    ///
+    /// # Panics
+    /// Panics if either the source or destination range exceeds the number of rows.
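+    ///
+    /// # Example
+    /// A minimal usage sketch (illustrative only, not compiled as a doc-test):
+    /// ```ignore
+    /// let mut words = [0b101, 0b000];
+    /// let mut set = BitSlice2d::<u32, u32>::from_mut_words(&mut words, 2, 8);
+    /// set.move_rows(0..1, 1);
+    /// assert!(set.row(0).is_empty());
+    /// assert!(set.row(1).contains(0) && set.row(1).contains(2));
+    /// ```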
+ #[inline] + #[track_caller] + pub fn move_rows( + &mut self, + src: impl IntoSliceIdx>, + dst: R, + ) { + let src = src.into_slice_idx().with_stride(self.row_stride); + let dst_start = dst.index() * self.row_stride as usize; + self.words.copy_within(src.clone(), dst_start); + let src_len = src.len(); + self.words[src.subtract_from_edge(dst_start..dst_start + src_len)].fill(0); + } + + /// Clears all elements from a range of rows. + /// + /// # Panics + /// Panics if the range exceeds the number of rows. + #[inline] + #[track_caller] + pub fn clear_rows(&mut self, rows: impl IntoSliceIdx) { + let words = &mut self.words[rows.into_slice_idx().with_stride(self.row_stride)]; + words.fill(0); + } +} + +impl PartialEq for BitSlice2d<'_, R, C> { + fn eq(&self, other: &Self) -> bool { + self.columns == other.columns && self.rows == other.rows && self.words == other.words + } +} +impl Eq for BitSlice2d<'_, R, C> {} + +/// A two-dimensional bit set with a fixed number of columns and a dynamic number of rows. +/// +/// This is represented as a dense array of words stored in row major order with each row aligned to +/// the start of a word. Any row not physically stored will be treated as though it contains no +/// items and storage for the row (and all previous rows) will be allocated as needed to store +/// values. In effect this will behave as though it had the maximum number of rows representable by +/// `R`. +pub struct GrowableBitSet2d { + words: Vec, + rows: u32, + columns: u32, + row_stride: u32, + phantom: PhantomData<(R, C)>, +} +impl GrowableBitSet2d { + /// Creates a new bit set with the given number of columns without allocating any storage. + #[inline] + #[must_use] + #[expect(clippy::cast_possible_truncation)] + pub const fn new(columns: u32) -> Self { + Self { + words: Vec::new(), + rows: 0, + columns, + row_stride: word_count_from_bits(columns as usize) as u32, + phantom: PhantomData, + } + } + + /// Gets the number of rows for which values are currently stored. + #[inline] + #[must_use] + pub const fn row_len(&self) -> u32 { + self.rows + } + + /// Gets the number of columns. + #[inline] + #[must_use] + pub const fn column_len(&self) -> u32 { + self.columns + } + + /// Get the backing slice of currently stored words. + #[inline] + #[must_use] + pub fn words(&self) -> &[Word] { + self.words.as_slice() + } + + /// Get the backing slice of currently stored words. + #[inline] + #[must_use] + pub fn words_mut(&mut self) -> &mut [Word] { + self.words.as_mut_slice() + } + + /// Checks if the set is empty. + #[inline] + #[must_use] + pub fn is_empty(&self) -> bool { + self.words.iter().all(|&x| x == 0) + } + + /// Creates an iterator over a range of stored rows. Any unstored rows within the range will be + /// silently ignored. + #[inline] + #[must_use] + pub fn iter_rows( + &self, + range: impl IntoSliceIdx, + ) -> impl ExactSizeIterator> + Clone { + self.words[range + .into_slice_idx() + .limit_explicit_bounds(self.rows as usize) + .with_stride(self.row_stride)] + .chunks_exact(self.row_stride as usize) + .map(|words| BitSlice::from_words(words)) + } + + /// Creates an iterator over a range of stored rows. Any unstored rows within the range will be + /// silently ignored. 
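+    ///
+    /// # Example
+    /// A minimal usage sketch (illustrative only, not compiled as a doc-test):
+    /// ```ignore
+    /// let mut set = GrowableBitSet2d::<u32, u32>::new(16);
+    /// set.ensure_row(2).insert(3);
+    /// // Rows 0..=2 are now stored; clear bit 3 in every stored row.
+    /// for row in set.iter_mut_rows(..) {
+    ///     row.remove(3);
+    /// }
+    /// assert!(set.is_empty());
+    /// ```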
+ #[inline] + #[must_use] + pub fn iter_mut_rows( + &mut self, + range: impl IntoSliceIdx, + ) -> impl ExactSizeIterator> { + self.words[range + .into_slice_idx() + .limit_explicit_bounds(self.rows as usize) + .with_stride(self.row_stride)] + .chunks_exact_mut(self.row_stride as usize) + .map(|words| BitSlice::from_words_mut(words)) + } + + /// Counts the number of elements in the set. + #[inline] + #[must_use] + pub fn count(&self) -> usize { + self.words.iter().map(|&x| x.count_ones() as usize).sum() + } + + /// Removes all items in the set and resets the number of stored rows to zero. + /// + /// This will not deallocate any currently allocated storage. + #[inline] + pub fn clear(&mut self) { + self.words.clear(); + self.rows = 0; + } + + /// Performs a union of two sets storing the result in `self`. Returns `true` if `self` has + /// changed. + /// + /// The number of rows stored in `self` will be extended if needed. + /// + /// # Panics + /// Panics if the sets contain a different number of columns. + pub fn union(&mut self, other: &Self) -> bool { + assert_eq!(self.columns, other.columns); + if self.rows < other.rows { + self.words.resize(other.row_stride as usize * other.rows as usize, 0); + self.rows = other.rows; + } + self.words.iter_mut().zip(&*other.words).fold(false, |res, (dst, src)| { + let prev = *dst; + *dst |= *src; + res || prev != *dst + }) + } +} +impl GrowableBitSet2d { + /// Creates an iterator which enumerates all stored rows. + #[inline] + #[must_use] + pub fn enumerate_rows(&self) -> impl ExactSizeIterator)> + Clone { + self.words + .chunks_exact(self.row_stride as usize) + .map(|words| BitSlice::from_words(words)) + .enumerate() + .map(|(i, row)| (R::new(i), row)) + } + + /// Creates an iterator which enumerates all stored rows. + #[inline] + #[must_use] + pub fn enumerate_mut_rows(&mut self) -> impl ExactSizeIterator)> { + self.words + .chunks_exact_mut(self.row_stride as usize) + .map(|words| BitSlice::from_words_mut(words)) + .enumerate() + .map(|(i, row)| (R::new(i), row)) + } + + /// Gets a reference to a row if the row is stored, or `None` if it is not. + #[inline] + pub fn opt_row(&self, row: R) -> Option<&BitSlice> { + let start = self.row_stride as usize * row.index(); + self.words + .get(start..start + self.row_stride as usize) + .map(BitSlice::from_words) + } + + /// Gets a reference to a row, allocating storage for it if needed. + /// + /// This will also allocate storage for all previous rows. + #[inline] + #[expect(clippy::cast_possible_truncation)] + pub fn ensure_row(&mut self, row: R) -> &mut BitSlice { + let start = self.row_stride as usize * row.index(); + let end = start + self.row_stride as usize; + BitSlice::from_words_mut(if self.words.get_mut(start..end).is_some() { + // Can't use the borrow from before due to borrow checking errors. + &mut self.words[start..end] + } else { + self.words.resize(end, 0); + self.rows = row.index() as u32 + 1; + &mut self.words[start..end] + }) + } + + /// Clears all elements from a range of rows. + /// + /// Any unstored rows referenced by the range will be silently ignored. + #[inline] + pub fn clear_rows(&mut self, rows: impl IntoSliceIdx) { + self.words[rows + .into_slice_idx() + .limit_explicit_bounds(self.rows as usize) + .with_stride(self.row_stride)] + .fill(0); + } + + /// Copies a range of rows to another part of the bitset. + /// + /// All unstored rows in the source range will be treated as though they were empty. 
All + /// unstored rows in the destination range with a corresponding stored row in the source range + /// will be allocated. + #[expect(clippy::cast_possible_truncation)] + pub fn copy_rows(&mut self, src: impl IntoSliceIdx, dst: R) { + let (src_range, src_extra) = src.into_slice_idx().split_at(self.rows as usize); + let src_row_len = src_range.len(); + if src_row_len == 0 { + let range = (dst.index()..dst.index() + src_extra) + .with_stride(self.row_stride) + .limit_explicit_bounds(self.words.len()); + self.words[range].fill(0); + } else { + let dst_row_end = dst.index() + src_row_len; + let dst_start = dst.index() * self.row_stride as usize; + let src_range = src_range.with_stride(self.row_stride); + let dst_copy_end = dst_start + src_range.len(); + if self.rows < dst_row_end as u32 { + self.words.resize(dst_copy_end, 0); + self.rows = dst_row_end as u32; + } + self.words.copy_within(src_range, dst_start); + let dst_end = self + .words + .len() + .min(dst_copy_end + src_extra * self.row_stride as usize); + self.words[dst_copy_end..dst_end].fill(0); + } + } + + /// Moves a range of rows to another part of the bitset leaving empty rows behind. + /// + /// All unstored rows in the source range will be treated as though they were empty. All + /// unstored rows in the destination range with a corresponding stored row in the source range + /// will be allocated. + #[expect(clippy::cast_possible_truncation)] + pub fn move_rows(&mut self, src: impl IntoSliceIdx, dst: R) { + let (src_range, src_extra) = src.into_slice_idx().split_at(self.rows as usize); + let src_row_len = src_range.len(); + if src_row_len == 0 { + let range = (dst.index()..dst.index() + src_extra) + .with_stride(self.row_stride) + .limit_explicit_bounds(self.words.len()); + self.words[range].fill(0); + } else { + let dst_row_end = dst.index() + src_row_len; + let dst_start = dst.index() * self.row_stride as usize; + let src_range = src_range.with_stride(self.row_stride); + let dst_copy_end = dst_start + src_range.len(); + if self.rows < dst_row_end as u32 { + self.words.resize(dst_copy_end, 0); + self.rows = dst_row_end as u32; + } + self.words.copy_within(src_range.clone(), dst_start); + let dst_end = self + .words + .len() + .min(dst_copy_end + src_extra * self.row_stride as usize); + self.words[dst_copy_end..dst_end].fill(0); + self.words[src_range.subtract_from_edge(dst_start..dst_end)].fill(0); + } + } +} + +impl PartialEq for GrowableBitSet2d { + fn eq(&self, other: &Self) -> bool { + assert_eq!(self.columns, other.columns); + let (lhs, rhs, extra) = if let Some((lhs, extra)) = self.words.split_at_checked(other.words.len()) { + (lhs, other.words.as_slice(), extra) + } else { + let (rhs, extra) = other.words.split_at(self.words.len()); + (self.words.as_slice(), rhs, extra) + }; + lhs == rhs && extra.iter().all(|&x| x == 0) + } +} +impl Eq for GrowableBitSet2d {} + +impl Clone for GrowableBitSet2d { + #[inline] + fn clone(&self) -> Self { + Self { + words: self.words.clone(), + rows: self.rows, + columns: self.columns, + row_stride: self.row_stride, + phantom: PhantomData, + } + } + + #[inline] + fn clone_from(&mut self, source: &Self) { + self.words.clone_from(&source.words); + self.rows = source.rows; + self.columns = source.columns; + self.row_stride = source.row_stride; + } +} diff --git a/clippy_data_structures/src/bit_slice.rs b/clippy_data_structures/src/bit_slice.rs new file mode 100644 index 000000000000..ff0114d41583 --- /dev/null +++ b/clippy_data_structures/src/bit_slice.rs @@ -0,0 +1,538 @@ +use 
core::marker::PhantomData; +use core::mem::{self, transmute}; +use core::ops::{Range, RangeFrom, RangeFull, RangeInclusive, RangeTo, RangeToInclusive}; +use core::slice::{self, SliceIndex}; +use core::{iter, range}; +use rustc_arena::DroplessArena; +use rustc_index::{Idx, IntoSliceIdx}; + +pub type Word = usize; +pub const WORD_BITS: usize = Word::BITS as usize; +#[expect(clippy::unnecessary_cast)] +pub const MAX_WORDS: usize = Word::MAX as usize / WORD_BITS; + +#[inline] +#[must_use] +#[expect(clippy::manual_div_ceil, reason = "worse codegen")] +pub const fn word_count_from_bits(bits: usize) -> usize { + (bits + (WORD_BITS - 1)) / WORD_BITS +} + +/// Gets the mask used to remove out-of-range bits from the final word. +#[inline] +#[must_use] +pub const fn final_mask_for_size(bits: usize) -> Word { + (!(!(0 as Word) << (bits % WORD_BITS))).wrapping_sub((bits % WORD_BITS == 0) as Word) +} + +pub struct BitRange { + /// The range of affected words. + words: R, + /// The amount to shift to make a bit-mask for the first word. + first_shift: u8, + /// The amount to shift to make a bit-mask for the last word. + last_shift: u8, +} +impl BitRange { + #[inline] + const fn first_mask(&self) -> Word { + !0 << self.first_shift + } + + #[inline] + const fn last_mask(&self) -> Word { + !0 >> self.last_shift + } +} + +pub trait IntoBitRange: Sized { + type Range: SliceIndex<[Word], Output = [Word]>; + fn into_bit_range(self) -> BitRange; +} +impl IntoBitRange for RangeFull { + type Range = Self; + #[inline] + fn into_bit_range(self) -> BitRange { + BitRange { + words: self, + first_shift: 0, + last_shift: 0, + } + } +} +impl IntoBitRange for Range { + type Range = Self; + #[inline] + #[expect(clippy::cast_possible_truncation)] + fn into_bit_range(self) -> BitRange { + let start = BitIdx::from_bit(self.start); + let end = BitIdx::from_bit(self.end); + BitRange { + words: Range { + start: start.word, + end: end.word + usize::from(end.bit != 0), + }, + first_shift: start.bit as u8, + last_shift: ((WORD_BITS - 1) - (end.bit.wrapping_sub(1) % WORD_BITS)) as u8, + } + } +} +impl IntoBitRange for RangeFrom { + type Range = Self; + #[inline] + #[expect(clippy::cast_possible_truncation)] + fn into_bit_range(self) -> BitRange { + let start = BitIdx::from_bit(self.start); + BitRange { + words: RangeFrom { start: start.word }, + first_shift: start.bit as u8, + last_shift: 0, + } + } +} +impl IntoBitRange for RangeTo { + type Range = Self; + #[inline] + #[expect(clippy::cast_possible_truncation)] + fn into_bit_range(self) -> BitRange { + let end = BitIdx::from_bit(self.end); + BitRange { + words: RangeTo { + end: end.word + usize::from(end.bit != 0), + }, + first_shift: 0, + last_shift: ((WORD_BITS - 1) - (end.bit.wrapping_sub(1) % WORD_BITS)) as u8, + } + } +} +impl IntoBitRange for RangeInclusive { + type Range = Range; + #[inline] + #[expect(clippy::cast_possible_truncation)] + fn into_bit_range(self) -> BitRange { + let start = BitIdx::from_bit(*self.start()); + let end = BitIdx::from_bit(*self.end()); + BitRange { + words: Range { + start: start.word, + end: end.word + 1, + }, + first_shift: start.bit as u8, + last_shift: ((WORD_BITS - 1) - end.bit) as u8, + } + } +} +impl IntoBitRange for RangeToInclusive { + type Range = RangeTo; + #[inline] + #[expect(clippy::cast_possible_truncation)] + fn into_bit_range(self) -> BitRange { + let end = BitIdx::from_bit(self.end); + BitRange { + words: RangeTo { end: end.word + 1 }, + first_shift: 0, + last_shift: ((WORD_BITS - 1) - end.bit) as u8, + } + } +} +impl 
IntoBitRange for range::Range { + type Range = range::Range; + #[inline] + #[expect(clippy::cast_possible_truncation)] + fn into_bit_range(self) -> BitRange { + let start = BitIdx::from_bit(self.start); + let end = BitIdx::from_bit(self.end); + BitRange { + words: range::Range { + start: start.word, + end: end.word + usize::from(end.bit != 0), + }, + first_shift: start.bit as u8, + last_shift: ((WORD_BITS - 1) - (end.bit.wrapping_sub(1) % WORD_BITS)) as u8, + } + } +} +impl IntoBitRange for range::RangeFrom { + type Range = range::RangeFrom; + #[inline] + #[expect(clippy::cast_possible_truncation)] + fn into_bit_range(self) -> BitRange { + let start = BitIdx::from_bit(self.start); + BitRange { + words: range::RangeFrom { start: start.word }, + first_shift: start.bit as u8, + last_shift: 0, + } + } +} +impl IntoBitRange for range::RangeInclusive { + type Range = range::Range; + #[inline] + #[expect(clippy::cast_possible_truncation)] + fn into_bit_range(self) -> BitRange { + let start = BitIdx::from_bit(self.start); + let end = BitIdx::from_bit(self.end); + BitRange { + words: range::Range { + start: start.word, + end: end.word + 1, + }, + first_shift: start.bit as u8, + last_shift: ((WORD_BITS - 1) - end.bit) as u8, + } + } +} + +struct BitIdx { + word: usize, + bit: usize, +} +impl BitIdx { + #[inline] + fn from_bit(bit: T) -> Self { + let bit = bit.index(); + Self { + word: bit / WORD_BITS, + bit: bit % WORD_BITS, + } + } + + #[inline] + fn word_mask(&self) -> Word { + 1 << self.bit + } +} + +/// A bit set represented as a dense slice of words. +/// +/// n.b. This can only hold bits as a multiple of `WORD_SIZE`. Use +/// `mask_final_word(final_mask_for_size(len))` to clear the final bits greater than or equal to +/// `len`. +#[repr(transparent)] +pub struct BitSlice { + phantom: PhantomData, + pub words: [Word], +} +impl BitSlice { + /// Interprets `words` as a bit set of the same size. + #[inline] + #[must_use] + pub const fn from_words(words: &[Word]) -> &Self { + // Not actually a safety requirement since everything will be checked by the slice on use. + debug_assert!(words.len() <= MAX_WORDS); + // SAFETY: `BitSlice` is a transparent wrapper around `[Word]`. + unsafe { transmute::<&[Word], &Self>(words) } + } + + /// Interprets `words` as a bit set of the same size. + #[inline] + #[expect(clippy::transmute_ptr_to_ptr)] + pub fn from_words_mut(words: &mut [Word]) -> &mut Self { + // Not actually a safety requirement since everything will be checked by the slice on use. + debug_assert!(words.len() <= MAX_WORDS); + // SAFETY: `BitSlice` is a transparent wrapper around `[Word]`. + unsafe { transmute::<&mut [Word], &mut Self>(words) } + } + + /// Interprets `words` as a bit set of the same size. + #[inline] + #[must_use] + pub fn from_boxed_words(words: Box<[Word]>) -> Box { + // Not actually a safety requirement since everything will be checked by the slice on use. + debug_assert!(words.len() <= MAX_WORDS); + // SAFETY: `BitSlice` is a transparent wrapper around `[Word]`. + unsafe { transmute::, Box>(words) } + } + + /// Gets the size of this slice in bits. + #[inline] + #[must_use] + pub const fn bit_len(&self) -> usize { + self.words.len() * WORD_BITS + } + + /// Checks if the set is empty. + #[inline] + #[must_use] + pub fn is_empty(&self) -> bool { + self.words.iter().all(|&x| x == 0) + } + + /// Counts the number of elements in the set. 
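+    ///
+    /// # Example
+    /// A minimal usage sketch (illustrative only, not compiled as a doc-test):
+    /// ```ignore
+    /// let mut set = BitSlice::<u32>::empty_box(100);
+    /// set.insert(1);
+    /// set.insert(64);
+    /// assert_eq!(set.count(), 2);
+    /// ```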
+ #[inline] + #[must_use] + pub fn count(&self) -> usize { + self.words.iter().map(|&x| x.count_ones() as usize).sum() + } + + /// Allocates a new empty boxed bit set of the given size rounded up to the nearest word size. + #[inline] + #[must_use] + pub fn empty_box(bits: usize) -> Box { + Self::from_boxed_words(vec![0; word_count_from_bits(bits)].into_boxed_slice()) + } + + /// Allocates a new empty bit set of the given size rounded up to the nearest word size. + #[inline] + pub fn empty_arena(arena: &DroplessArena, bits: usize) -> &mut Self { + Self::from_words_mut(arena.alloc_from_iter(iter::repeat_n(0, word_count_from_bits(bits)))) + } + + /// Applies a bit-mask to the final word of the slice. + #[inline] + pub fn mask_final_word(&mut self, mask: Word) { + if let Some(word) = self.words.last_mut() { + *word &= mask; + } + } + + /// Fills the entire set. + /// + /// n.b. This can only work with whole `Word`s. Use `mask_final_word(final_mask_for_size(len))` + /// to clear the final bits greater than or equal to `len`. + #[inline] + pub fn fill(&mut self) { + self.words.fill(!0); + } + + /// Remove all elements from the set. + #[inline] + pub fn clear(&mut self) { + self.words.fill(0); + } + + /// Performs a union of two sets storing the result in `self`. Returns `true` if `self` has + /// changed. + /// + /// Note: The result will be truncated to the number of bits contained in `self` + pub fn union_trunc(&mut self, other: &Self) -> bool { + self.words.iter_mut().zip(&other.words).fold(false, |res, (lhs, rhs)| { + let prev = *lhs; + *lhs |= *rhs; + prev != *lhs || res + }) + } + + /// Performs an intersection of two sets storing the result in `self`. Returns `true` if `self` + /// has changed. + pub fn intersect(&mut self, other: &Self) -> bool { + self.words.iter_mut().zip(&other.words).fold(false, |res, (lhs, rhs)| { + let prev = *lhs; + *lhs &= *rhs; + prev != *lhs || res + }) + } + + /// Performs a subtraction of other from `self` storing the result in `self`. Returns `true` if + /// `self` has changed. + pub fn subtract(&mut self, other: &Self) -> bool { + self.words.iter_mut().zip(&other.words).fold(false, |res, (lhs, rhs)| { + let prev = *lhs; + *lhs &= !*rhs; + prev != *lhs || res + }) + } +} +impl BitSlice { + /// Inserts the given element into the set. Returns `true` if `self` has changed. + /// + /// # Panics + /// Panics if the element lies outside the bounds of this slice. + #[inline] + #[track_caller] + pub fn insert(&mut self, bit: T) -> bool { + let idx = BitIdx::from_bit(bit); + let res = self.words[idx.word] & idx.word_mask() == 0; + self.words[idx.word] |= idx.word_mask(); + res + } + + /// Removes the given element from the set. Returns `true` if `self` has changed. + /// + /// # Panics + /// Panics if the element lies outside the bounds of this slice. + #[inline] + #[track_caller] + pub fn remove(&mut self, bit: T) -> bool { + let idx = BitIdx::from_bit(bit); + let res = self.words[idx.word] & idx.word_mask() != 0; + self.words[idx.word] &= !idx.word_mask(); + res + } + + /// Checks if the set contains the given element. + /// + /// # Panics + /// Panics if the element lies outside the bounds of this slice. + #[inline] + #[track_caller] + pub fn contains(&self, bit: T) -> bool { + let idx = BitIdx::from_bit(bit); + self.words.get(idx.word).map_or(0, |&x| x) & idx.word_mask() != 0 + } + + /// Inserts the given range of elements into the slice. + /// + /// # Panics + /// Panics if the range exceeds the bounds of this slice. 
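+    ///
+    /// # Example
+    /// A minimal usage sketch (illustrative only, not compiled as a doc-test):
+    /// ```ignore
+    /// let mut set = BitSlice::<u32>::empty_box(128);
+    /// set.insert_range(3..70);
+    /// assert!(set.contains(3) && set.contains(69) && !set.contains(70));
+    /// assert_eq!(set.count(), 67);
+    /// ```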
+ #[track_caller] + pub fn insert_range(&mut self, range: impl IntoSliceIdx) { + let range = range.into_slice_idx().into_bit_range(); + let first = range.first_mask(); + let last = range.last_mask(); + match &mut self.words[range.words] { + [] => {}, + [dst] => *dst |= first & last, + [first_dst, dst @ .., last_dst] => { + *first_dst |= first; + dst.fill(!0); + *last_dst |= last; + }, + } + } + + /// Creates an iterator over all items in the set. + #[inline] + #[must_use] + pub fn iter(&self) -> Iter<'_, T> { + Iter::new(&self.words) + } + + /// Creates an iterator which returns and removes all items in the set. + /// + /// If the iterator is dropped before it is fully consumed all remaining items in the set will + /// be removed. + #[inline] + #[must_use] + pub fn drain(&mut self) -> Drain<'_, T> { + Drain::new(&mut self.words) + } +} + +impl Extend for &mut BitSlice { + fn extend>(&mut self, iter: Iter) { + for i in iter { + self.insert(i); + } + } +} + +impl<'a, T: Idx> IntoIterator for &'a BitSlice { + type Item = T; + type IntoIter = Iter<'a, T>; + + #[inline] + fn into_iter(self) -> Self::IntoIter { + Iter::new(&self.words) + } +} + +/// Iterator over the set bits in a single word. +#[derive(Default, Clone)] +pub struct WordBitIter(Word); +impl WordBitIter { + #[inline] + #[must_use] + pub const fn new(word: Word) -> Self { + Self(word) + } +} +impl Iterator for WordBitIter { + type Item = u32; + #[inline] + fn next(&mut self) -> Option { + if self.0 == 0 { + None + } else { + let bit_pos = self.0.trailing_zeros(); + self.0 ^= 1 << bit_pos; + Some(bit_pos) + } + } +} + +// Copied from `rustc_data_structures::bit_set`. +pub struct Iter<'a, T: Idx> { + /// Iterator over a single word. + word: WordBitIter, + + /// The offset (measured in bits) of the current word. + offset: usize, + + /// Underlying iterator over the words. + inner: slice::Iter<'a, Word>, + + marker: PhantomData, +} +impl<'a, T: Idx> Iter<'a, T> { + #[inline] + fn new(words: &'a [Word]) -> Self { + // We initialize `word` and `offset` to degenerate values. On the first + // call to `next()` we will fall through to getting the first word from + // `iter`, which sets `word` to the first word (if there is one) and + // `offset` to 0. Doing it this way saves us from having to maintain + // additional state about whether we have started. + Self { + word: WordBitIter::new(0), + offset: usize::MAX - (WORD_BITS - 1), + inner: words.iter(), + marker: PhantomData, + } + } +} +impl Iterator for Iter<'_, T> { + type Item = T; + fn next(&mut self) -> Option { + loop { + if let Some(idx) = self.word.next() { + return Some(T::new(idx as usize + self.offset)); + } + + // Move onto the next word. `wrapping_add()` is needed to handle + // the degenerate initial value given to `offset` in `new()`. 
+            self.word = WordBitIter::new(*self.inner.next()?);
+            self.offset = self.offset.wrapping_add(WORD_BITS);
+        }
+    }
+}
+
+pub struct Drain<'a, T> {
+    word: WordBitIter,
+    offset: usize,
+    iter: slice::IterMut<'a, Word>,
+    marker: PhantomData<T>,
+}
+impl<'a, T> Drain<'a, T> {
+    #[inline]
+    fn new(words: &'a mut [Word]) -> Self {
+        Self {
+            word: WordBitIter::new(0),
+            offset: usize::MAX - (WORD_BITS - 1),
+            iter: words.iter_mut(),
+            marker: PhantomData,
+        }
+    }
+}
+impl<T> Drop for Drain<'_, T> {
+    #[inline]
+    fn drop(&mut self) {
+        for x in &mut self.iter {
+            *x = 0;
+        }
+    }
+}
+impl<T: Idx> Iterator for Drain<'_, T> {
+    type Item = T;
+    fn next(&mut self) -> Option<T> {
+        loop {
+            if let Some(idx) = self.word.next() {
+                return Some(T::new(idx as usize + self.offset));
+            }
+
+            // Move onto the next word. `wrapping_add()` is needed to handle
+            // the degenerate initial value given to `offset` in `new()`.
+            self.word = WordBitIter::new(mem::replace(self.iter.next()?, 0));
+            self.offset = self.offset.wrapping_add(WORD_BITS);
+        }
+    }
+}
diff --git a/clippy_data_structures/src/lib.rs b/clippy_data_structures/src/lib.rs
new file mode 100644
index 000000000000..c84b8aa75a58
--- /dev/null
+++ b/clippy_data_structures/src/lib.rs
@@ -0,0 +1,86 @@
+#![feature(
+    array_windows,
+    cmp_minmax,
+    if_let_guard,
+    maybe_uninit_slice,
+    min_specialization,
+    new_range_api,
+    rustc_private,
+    slice_partition_dedup
+)]
+
+extern crate rustc_arena;
+extern crate rustc_driver;
+extern crate rustc_index;
+extern crate rustc_mir_dataflow;
+
+use core::ops::RangeBounds;
+
+mod range;
+mod sorted;
+mod traits;
+
+pub mod bit_slice;
+pub use bit_slice::BitSlice;
+
+pub mod bit_set_2d;
+pub use bit_set_2d::{BitSlice2d, GrowableBitSet2d};
+
+mod slice_set;
+pub use slice_set::SliceSet;
+
+/// An iterator where the size hint is provided by calling `Iterator::count`.
+pub struct CountedIter<T>(pub T);
+impl<T> Iterator for CountedIter<T>
+where
+    T: Iterator + Clone,
+{
+    type Item = T::Item;
+    fn next(&mut self) -> Option<Self::Item> {
+        self.0.next()
+    }
+    fn nth(&mut self, n: usize) -> Option<Self::Item> {
+        self.0.nth(n)
+    }
+    fn count(self) -> usize {
+        self.0.count()
+    }
+    fn fold<B, F>(self, init: B, f: F) -> B
+    where
+        F: FnMut(B, Self::Item) -> B,
+    {
+        self.0.fold(init, f)
+    }
+    fn size_hint(&self) -> (usize, Option<usize>) {
+        let size = self.0.clone().count();
+        (size, Some(size))
+    }
+}
+
+/// Moves items within the slice leaving behind the default value at indices from the source range
+/// which are not also part of the destination range.
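+///
+/// # Example
+/// A minimal usage sketch (illustrative only, not compiled as a doc-test):
+/// ```ignore
+/// let mut data = [1, 2, 3, 0, 0];
+/// move_within_slice(&mut data, 0..2, 3);
+/// assert_eq!(data, [0, 0, 3, 1, 2]);
+/// ```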
+#[inline] +pub fn move_within_slice( + slice: &mut [impl Copy + Default], + src: impl Clone + RangeBounds + range::Len + range::SubtractFromEdge, + dst: usize, +) { + slice.copy_within(src.clone(), dst); + let src_len = src.len(); + for x in &mut slice[src.subtract_from_edge(dst..dst + src_len)] { + *x = Default::default(); + } +} + +#[test] +fn test_move_within_slice() { + let slice = &mut [0, 1, 2, 3, 4]; + move_within_slice(slice, 0..2, 2); + assert_eq!(slice, &[0, 0, 0, 1, 4]); + move_within_slice(slice, 3..5, 3); + assert_eq!(slice, &[0, 0, 0, 1, 4]); + move_within_slice(slice, 3..5, 2); + assert_eq!(slice, &[0, 0, 1, 4, 0]); + move_within_slice(slice, 2..4, 3); + assert_eq!(slice, &[0, 0, 0, 1, 4]); +} diff --git a/clippy_data_structures/src/range.rs b/clippy_data_structures/src/range.rs new file mode 100644 index 000000000000..6f84df7e4ff9 --- /dev/null +++ b/clippy_data_structures/src/range.rs @@ -0,0 +1,1018 @@ +use crate::bit_slice::Word; +use core::cmp::minmax; +use core::ops::{Bound, Range, RangeBounds, RangeFrom, RangeFull, RangeInclusive, RangeTo, RangeToInclusive}; +use core::range; +use core::slice::SliceIndex; + +/// Gets the total number of steps in a range. +pub trait Len { + /// Gets the total number of steps in a range. + fn len(&self) -> usize; +} +impl Len for usize { + #[inline] + fn len(&self) -> usize { + 1 + } +} +impl Len for Range { + #[inline] + fn len(&self) -> usize { + self.end - self.start + } +} +impl Len for range::Range { + #[inline] + fn len(&self) -> usize { + self.end - self.start + } +} +impl Len for RangeTo { + #[inline] + fn len(&self) -> usize { + self.end + } +} + +/// Removes items from the current range which overlap with another. +/// +/// The other range must either start before or at the current range, or it must end at or after the +/// current range. i.e. `other.start <= self.start || self.end <= other.end` +pub trait SubtractFromEdge { + /// Removes items from the current range which overlap with another. + fn subtract_from_edge(self, other: Range) -> Range; +} +impl SubtractFromEdge for usize { + #[inline] + fn subtract_from_edge(self, other: Range) -> Range { + Range { + start: self, + end: self + usize::from(other.contains(&self)), + } + } +} +impl SubtractFromEdge for Range { + #[inline] + fn subtract_from_edge(self, other: Range) -> Range { + debug_assert!(other.start <= self.start || self.end <= other.end); + let (start, end) = if other.start <= self.start { + (self.start.max(other.end).min(self.end), self.end) + } else { + (self.start, self.end.min(other.start)) + }; + Range { start, end } + } +} +impl SubtractFromEdge for range::Range { + #[inline] + fn subtract_from_edge(self, other: Range) -> Range { + debug_assert!(other.start <= self.start || self.end <= other.end); + let (start, end) = if other.start <= self.start { + (self.start.max(other.end).min(self.end), self.end) + } else { + (self.start, self.end.min(other.start)) + }; + Range { start, end } + } +} +impl SubtractFromEdge for RangeTo { + #[inline] + fn subtract_from_edge(self, other: Range) -> Range { + debug_assert!(other.start == 0 || self.end <= other.end); + let (start, end) = if other.start == 0 { + (other.end.min(self.end), self.end) + } else { + (0, self.end.min(other.start)) + }; + Range { start, end } + } +} + +/// Applies an exclusive upper limit to any explicit bounds in a range leaving implicit bounds +/// unchanged. 
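+///
+/// # Example
+/// A short sketch of the intended behaviour (values taken from the tests below):
+/// ```ignore
+/// assert_eq!((1..20).limit_explicit_bounds(5), 1..5);
+/// assert_eq!((2..).limit_explicit_bounds(5), 2..);      // implicit upper bound is left alone
+/// assert_eq!((..=20).limit_explicit_bounds(5), ..5);    // inclusive end becomes an exclusive one
+/// ```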
+pub trait LimitExplicitBounds { + type Output: Clone + + SliceIndex<[Word], Output = [Word]> + + RangeBounds + + LimitExplicitBounds + + WithStride; + /// Applies an exclusive upper limit to any explicit bounds in a range leaving implicit bounds + /// unchanged. + fn limit_explicit_bounds(self, limit: usize) -> Self::Output; +} +impl LimitExplicitBounds for usize { + type Output = Range; + #[inline] + #[expect(clippy::range_plus_one)] + fn limit_explicit_bounds(self, limit: usize) -> Self::Output { + if self < limit { self..self + 1 } else { limit..limit } + } +} +impl LimitExplicitBounds for RangeFull { + type Output = Self; + #[inline] + fn limit_explicit_bounds(self, _: usize) -> Self::Output { + self + } +} +impl LimitExplicitBounds for Range { + type Output = Self; + #[inline] + fn limit_explicit_bounds(self, limit: usize) -> Self::Output { + Self { + start: self.start.min(limit), + end: self.end.min(limit), + } + } +} +impl LimitExplicitBounds for range::Range { + type Output = Self; + #[inline] + fn limit_explicit_bounds(self, limit: usize) -> Self::Output { + Self { + start: self.start.min(limit), + end: self.end.min(limit), + } + } +} +impl LimitExplicitBounds for RangeInclusive { + type Output = Range; + #[inline] + fn limit_explicit_bounds(self, limit: usize) -> Self::Output { + Range { + start: (*self.start()).min(limit), + end: if *self.end() < limit { + match self.end_bound() { + Bound::Included(&x) => x + 1, + Bound::Excluded(&x) => x, + Bound::Unbounded => unreachable!(), + } + } else { + limit + }, + } + } +} +impl LimitExplicitBounds for range::RangeInclusive { + type Output = range::Range; + #[inline] + fn limit_explicit_bounds(self, limit: usize) -> Self::Output { + range::Range { + start: self.start.min(limit), + end: if self.end < limit { self.end + 1 } else { limit }, + } + } +} +impl LimitExplicitBounds for RangeTo { + type Output = Self; + #[inline] + fn limit_explicit_bounds(self, limit: usize) -> Self::Output { + Self { + end: self.end.min(limit), + } + } +} +impl LimitExplicitBounds for RangeToInclusive { + type Output = RangeTo; + #[inline] + fn limit_explicit_bounds(self, limit: usize) -> Self::Output { + RangeTo { + end: if self.end < limit { self.end + 1 } else { limit }, + } + } +} +impl LimitExplicitBounds for RangeFrom { + type Output = Self; + #[inline] + fn limit_explicit_bounds(self, limit: usize) -> Self::Output { + Self { + start: self.start.min(limit), + } + } +} +impl LimitExplicitBounds for range::RangeFrom { + type Output = Self; + #[inline] + fn limit_explicit_bounds(self, limit: usize) -> Self::Output { + Self { + start: self.start.min(limit), + } + } +} + +/// Adjusts a range/index to contain each item as though they were `n` steps apart (i.e. multiplies +/// the bounds by `n`). 
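+///
+/// # Example
+/// A short sketch of the intended behaviour (values taken from the tests below):
+/// ```ignore
+/// assert_eq!(2.with_stride(4), 8..12);            // a single index expands to a whole row
+/// assert_eq!((4..10).with_stride(5), 20..50);
+/// assert_eq!((4..=10).with_stride(5), 20..55);    // inclusive end becomes an exclusive one
+/// ```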
+pub trait WithStride { + type Output: Clone + + SliceIndex<[Word], Output = [Word]> + + RangeBounds + + LimitExplicitBounds + + WithStride; + fn with_stride(self, stride: u32) -> Self::Output; +} +impl WithStride for usize { + type Output = Range; + fn with_stride(self, stride: u32) -> Self::Output { + let start = self * stride as usize; + Range { + start, + end: start + stride as usize, + } + } +} +impl WithStride for RangeFull { + type Output = Self; + #[inline] + fn with_stride(self, _: u32) -> Self::Output { + self + } +} +impl WithStride for Range { + type Output = Self; + #[inline] + fn with_stride(self, stride: u32) -> Self::Output { + Range { + start: self.start * stride as usize, + end: self.end * stride as usize, + } + } +} +impl WithStride for range::Range { + type Output = Self; + #[inline] + fn with_stride(self, stride: u32) -> Self::Output { + range::Range { + start: self.start * stride as usize, + end: self.end * stride as usize, + } + } +} +impl WithStride for RangeInclusive { + type Output = Range; + #[inline] + fn with_stride(self, stride: u32) -> Self::Output { + Range { + start: *self.start() * stride as usize, + end: (*self.end() + 1) * stride as usize, + } + } +} +impl WithStride for range::RangeInclusive { + type Output = range::Range; + #[inline] + fn with_stride(self, stride: u32) -> Self::Output { + range::Range { + start: self.start * stride as usize, + end: (self.end + 1) * stride as usize, + } + } +} +impl WithStride for RangeFrom { + type Output = Self; + #[inline] + fn with_stride(self, stride: u32) -> Self::Output { + RangeFrom { + start: self.start * stride as usize, + } + } +} +impl WithStride for range::RangeFrom { + type Output = Self; + #[inline] + fn with_stride(self, stride: u32) -> Self::Output { + range::RangeFrom { + start: self.start * stride as usize, + } + } +} +impl WithStride for RangeTo { + type Output = Self; + #[inline] + fn with_stride(self, stride: u32) -> Self::Output { + RangeTo { + end: self.end * stride as usize, + } + } +} +impl WithStride for RangeToInclusive { + type Output = RangeTo; + #[inline] + fn with_stride(self, stride: u32) -> Self::Output { + RangeTo { + end: (self.end + 1) * stride as usize, + } + } +} + +/// Splits a range/index into a range before `n`, and the number of steps after `n`. 
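+///
+/// # Example
+/// A short sketch of the intended behaviour (values taken from the tests below):
+/// ```ignore
+/// assert_eq!((1..5).split_at(2), (1..2, 3));      // 1..2 lies before 2; three steps (2, 3, 4) lie after
+/// assert_eq!((20..200).split_at(55), (20..55, 145));
+/// assert_eq!(5.split_at(20), (5..6, 0));
+/// ```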
+pub trait SplitAt { + type Output: Clone + + SliceIndex<[Word], Output = [Word]> + + RangeBounds + + Len + + LimitExplicitBounds + + SubtractFromEdge + + WithStride; + fn split_at(self, idx: usize) -> (Self::Output, usize); +} +impl SplitAt for usize { + type Output = Range; + #[inline] + #[expect(clippy::range_plus_one)] + fn split_at(self, idx: usize) -> (Self::Output, usize) { + if self < idx { (self..self + 1, 0) } else { (idx..idx, 1) } + } +} +impl SplitAt for Range { + type Output = Range; + fn split_at(self, idx: usize) -> (Self::Output, usize) { + let [pre_start, post_start] = minmax(self.start, idx); + let [pre_end, post_end] = minmax(self.end, idx); + ( + Range { + start: pre_start, + end: pre_end, + }, + post_end - post_start, + ) + } +} +impl SplitAt for range::Range { + type Output = range::Range; + fn split_at(self, idx: usize) -> (Self::Output, usize) { + let [pre_start, post_start] = minmax(self.start, idx); + let [pre_end, post_end] = minmax(self.end, idx); + ( + range::Range { + start: pre_start, + end: pre_end, + }, + post_end - post_start, + ) + } +} +impl SplitAt for RangeInclusive { + type Output = Range; + fn split_at(self, idx: usize) -> (Self::Output, usize) { + let [pre_start, post_start] = minmax(*self.start(), idx); + let [pre_end, post_end] = minmax( + match self.end_bound() { + Bound::Unbounded => 0, + Bound::Excluded(&x) => x, + // will result in invalid or empty ranges on overflow. + Bound::Included(&x) => x + 1, + }, + idx, + ); + ( + Range { + start: pre_start, + end: pre_end, + }, + post_end - post_start, + ) + } +} +impl SplitAt for range::RangeInclusive { + type Output = range::Range; + fn split_at(self, idx: usize) -> (Self::Output, usize) { + let [pre_start, post_start] = minmax(self.start, idx); + let [pre_end, post_end] = minmax(self.end + 1, idx); + ( + range::Range { + start: pre_start, + end: pre_end, + }, + post_end - post_start, + ) + } +} +impl SplitAt for RangeTo { + type Output = RangeTo; + #[inline] + fn split_at(self, idx: usize) -> (Self::Output, usize) { + let [pre_end, post_end] = minmax(self.end, idx); + (RangeTo { end: pre_end }, post_end - idx) + } +} +impl SplitAt for RangeToInclusive { + type Output = RangeTo; + #[inline] + fn split_at(self, idx: usize) -> (Self::Output, usize) { + let [pre_end, post_end] = minmax(self.end + 1, idx); + (RangeTo { end: pre_end }, post_end - idx) + } +} + +#[test] +fn len() { + assert_eq!(Len::len(&0), 1); + assert_eq!(Len::len(&Range { start: 0, end: 0 }), 0); + assert_eq!(Len::len(&range::Range { start: 0, end: 0 }), 0); + assert_eq!(Len::len(&RangeTo { end: 0 }), 0); + + assert_eq!(Len::len(&Range { start: 0, end: 1 }), 1); + assert_eq!(Len::len(&range::Range { start: 0, end: 1 }), 1); + assert_eq!(Len::len(&RangeTo { end: 1 }), 1); + + assert_eq!( + Len::len(&Range { + start: 0, + end: usize::MAX + }), + usize::MAX + ); + assert_eq!( + Len::len(&range::Range { + start: 0, + end: usize::MAX + }), + usize::MAX + ); + assert_eq!(Len::len(&RangeTo { end: usize::MAX }), usize::MAX); +} + +#[test] +#[expect(clippy::too_many_lines)] +fn subtract_from_edge() { + assert_eq!( + Range { start: 0, end: 0 }.subtract_from_edge(Range { start: 0, end: 0 }), + Range { start: 0, end: 0 }, + ); + assert_eq!( + Range { start: 0, end: 0 }.subtract_from_edge(Range { start: 0, end: 1 }), + Range { start: 0, end: 0 }, + ); + assert_eq!( + Range { start: 0, end: 0 }.subtract_from_edge(Range { start: 1, end: 1 }), + Range { start: 0, end: 0 }, + ); + assert_eq!( + Range { start: 0, end: 1 }.subtract_from_edge(Range { 
start: 0, end: 0 }), + Range { start: 0, end: 1 }, + ); + assert_eq!( + Range { start: 0, end: 1 }.subtract_from_edge(Range { start: 0, end: 1 }), + // `0..0`` would also be acceptable + Range { start: 1, end: 1 }, + ); + assert_eq!( + Range { start: 0, end: 1 }.subtract_from_edge(Range { start: 1, end: 1 }), + Range { start: 0, end: 1 }, + ); + assert_eq!( + Range { start: 0, end: 1 }.subtract_from_edge(Range { start: 1, end: 2 }), + Range { start: 0, end: 1 }, + ); + assert_eq!( + Range { start: 1, end: 1 }.subtract_from_edge(Range { start: 0, end: 0 }), + Range { start: 1, end: 1 }, + ); + assert_eq!( + Range { start: 1, end: 1 }.subtract_from_edge(Range { start: 0, end: 1 }), + Range { start: 1, end: 1 }, + ); + assert_eq!( + Range { start: 1, end: 1 }.subtract_from_edge(Range { start: 1, end: 2 }), + Range { start: 1, end: 1 }, + ); + assert_eq!( + Range { start: 1, end: 3 }.subtract_from_edge(Range { start: 0, end: 0 }), + Range { start: 1, end: 3 }, + ); + assert_eq!( + Range { start: 1, end: 3 }.subtract_from_edge(Range { start: 0, end: 1 }), + Range { start: 1, end: 3 }, + ); + assert_eq!( + Range { start: 1, end: 3 }.subtract_from_edge(Range { start: 0, end: 2 }), + Range { start: 2, end: 3 }, + ); + assert_eq!( + Range { start: 1, end: 3 }.subtract_from_edge(Range { start: 1, end: 2 }), + Range { start: 2, end: 3 }, + ); + assert_eq!( + Range { start: 1, end: 3 }.subtract_from_edge(Range { start: 0, end: 3 }), + Range { start: 3, end: 3 }, + ); + assert_eq!( + Range { start: 1, end: 3 }.subtract_from_edge(Range { start: 1, end: 2 }), + Range { start: 2, end: 3 }, + ); + assert_eq!( + Range { start: 1, end: 3 }.subtract_from_edge(Range { start: 3, end: 3 }), + Range { start: 1, end: 3 }, + ); + assert_eq!( + Range { start: 1, end: 3 }.subtract_from_edge(Range { start: 3, end: 4 }), + Range { start: 1, end: 3 }, + ); + assert_eq!( + Range { start: 1, end: 3 }.subtract_from_edge(Range { start: 2, end: 3 }), + Range { start: 1, end: 2 }, + ); + assert_eq!( + Range { start: 1, end: 3 }.subtract_from_edge(Range { start: 2, end: 4 }), + Range { start: 1, end: 2 }, + ); + assert_eq!( + Range { start: 1, end: 3 }.subtract_from_edge(Range { start: 1, end: 4 }), + // `1..1` would alsop be acceptable + Range { start: 3, end: 3 }, + ); + assert_eq!( + Range { start: 1, end: 3 }.subtract_from_edge(Range { start: 0, end: 4 }), + // `1..1` would alsop be acceptable + Range { start: 3, end: 3 }, + ); + + assert_eq!( + range::Range { start: 0, end: 0 }.subtract_from_edge(Range { start: 0, end: 0 }), + Range { start: 0, end: 0 }, + ); + assert_eq!( + range::Range { start: 0, end: 0 }.subtract_from_edge(Range { start: 0, end: 1 }), + Range { start: 0, end: 0 }, + ); + assert_eq!( + range::Range { start: 0, end: 0 }.subtract_from_edge(Range { start: 1, end: 1 }), + Range { start: 0, end: 0 }, + ); + assert_eq!( + range::Range { start: 0, end: 1 }.subtract_from_edge(Range { start: 0, end: 0 }), + Range { start: 0, end: 1 }, + ); + assert_eq!( + range::Range { start: 0, end: 1 }.subtract_from_edge(Range { start: 0, end: 1 }), + // `0..0`` would also be acceptable + Range { start: 1, end: 1 }, + ); + assert_eq!( + range::Range { start: 0, end: 1 }.subtract_from_edge(Range { start: 1, end: 1 }), + Range { start: 0, end: 1 }, + ); + assert_eq!( + range::Range { start: 0, end: 1 }.subtract_from_edge(Range { start: 1, end: 2 }), + Range { start: 0, end: 1 }, + ); + assert_eq!( + range::Range { start: 1, end: 1 }.subtract_from_edge(Range { start: 0, end: 0 }), + Range { start: 1, end: 1 }, + ); + 
assert_eq!( + range::Range { start: 1, end: 1 }.subtract_from_edge(Range { start: 0, end: 1 }), + Range { start: 1, end: 1 }, + ); + assert_eq!( + range::Range { start: 1, end: 1 }.subtract_from_edge(Range { start: 1, end: 2 }), + Range { start: 1, end: 1 }, + ); + assert_eq!( + range::Range { start: 1, end: 3 }.subtract_from_edge(Range { start: 0, end: 0 }), + Range { start: 1, end: 3 }, + ); + assert_eq!( + range::Range { start: 1, end: 3 }.subtract_from_edge(Range { start: 0, end: 1 }), + Range { start: 1, end: 3 }, + ); + assert_eq!( + range::Range { start: 1, end: 3 }.subtract_from_edge(Range { start: 0, end: 2 }), + Range { start: 2, end: 3 }, + ); + assert_eq!( + range::Range { start: 1, end: 3 }.subtract_from_edge(Range { start: 1, end: 2 }), + Range { start: 2, end: 3 }, + ); + assert_eq!( + range::Range { start: 1, end: 3 }.subtract_from_edge(Range { start: 0, end: 3 }), + Range { start: 3, end: 3 }, + ); + assert_eq!( + range::Range { start: 1, end: 3 }.subtract_from_edge(Range { start: 1, end: 2 }), + Range { start: 2, end: 3 }, + ); + assert_eq!( + range::Range { start: 1, end: 3 }.subtract_from_edge(Range { start: 3, end: 3 }), + Range { start: 1, end: 3 }, + ); + assert_eq!( + range::Range { start: 1, end: 3 }.subtract_from_edge(Range { start: 3, end: 4 }), + Range { start: 1, end: 3 }, + ); + assert_eq!( + range::Range { start: 1, end: 3 }.subtract_from_edge(Range { start: 2, end: 3 }), + Range { start: 1, end: 2 }, + ); + assert_eq!( + range::Range { start: 1, end: 3 }.subtract_from_edge(Range { start: 2, end: 4 }), + Range { start: 1, end: 2 }, + ); + assert_eq!( + range::Range { start: 1, end: 3 }.subtract_from_edge(Range { start: 1, end: 4 }), + // `1..1` would alsop be acceptable + Range { start: 3, end: 3 }, + ); + assert_eq!( + range::Range { start: 1, end: 3 }.subtract_from_edge(Range { start: 0, end: 4 }), + // `1..1` would alsop be acceptable + Range { start: 3, end: 3 }, + ); + + // RangeTo + assert_eq!( + RangeTo { end: 0 }.subtract_from_edge(Range { start: 0, end: 0 }), + Range { start: 0, end: 0 }, + ); + assert_eq!( + RangeTo { end: 0 }.subtract_from_edge(Range { start: 0, end: 1 }), + Range { start: 0, end: 0 }, + ); + assert_eq!( + RangeTo { end: 0 }.subtract_from_edge(Range { start: 1, end: 1 }), + Range { start: 0, end: 0 }, + ); + assert_eq!( + RangeTo { end: 1 }.subtract_from_edge(Range { start: 0, end: 0 }), + Range { start: 0, end: 1 }, + ); + assert_eq!( + RangeTo { end: 1 }.subtract_from_edge(Range { start: 0, end: 1 }), + // `0..0`` would also be acceptable + Range { start: 1, end: 1 }, + ); + assert_eq!( + RangeTo { end: 1 }.subtract_from_edge(Range { start: 1, end: 1 }), + Range { start: 0, end: 1 }, + ); + assert_eq!( + RangeTo { end: 1 }.subtract_from_edge(Range { start: 1, end: 2 }), + Range { start: 0, end: 1 }, + ); + assert_eq!( + RangeTo { end: 2 }.subtract_from_edge(Range { start: 0, end: 0 }), + Range { start: 0, end: 2 }, + ); + assert_eq!( + RangeTo { end: 2 }.subtract_from_edge(Range { start: 0, end: 1 }), + Range { start: 1, end: 2 }, + ); + assert_eq!( + RangeTo { end: 2 }.subtract_from_edge(Range { start: 0, end: 2 }), + Range { start: 2, end: 2 }, + ); + assert_eq!( + RangeTo { end: 2 }.subtract_from_edge(Range { start: 0, end: 3 }), + Range { start: 2, end: 2 }, + ); + assert_eq!( + RangeTo { end: 2 }.subtract_from_edge(Range { start: 2, end: 2 }), + Range { start: 0, end: 2 }, + ); + assert_eq!( + RangeTo { end: 2 }.subtract_from_edge(Range { start: 2, end: 3 }), + Range { start: 0, end: 2 }, + ); + assert_eq!( + RangeTo { end: 
2 }.subtract_from_edge(Range { start: 1, end: 2 }), + Range { start: 0, end: 1 }, + ); + assert_eq!( + RangeTo { end: 2 }.subtract_from_edge(Range { start: 1, end: 3 }), + Range { start: 0, end: 1 }, + ); +} + +#[test] +fn limit_explicit_bounds() { + assert_eq!(0.limit_explicit_bounds(0), Range { start: 0, end: 0 }); + assert_eq!(0.limit_explicit_bounds(1), Range { start: 0, end: 1 }); + assert_eq!(1.limit_explicit_bounds(1), Range { start: 1, end: 1 }); + assert_eq!(5.limit_explicit_bounds(2), Range { start: 2, end: 2 }); + + assert_eq!( + Range { start: 0, end: 0 }.limit_explicit_bounds(0), + Range { start: 0, end: 0 }, + ); + assert_eq!( + Range { start: 0, end: 1 }.limit_explicit_bounds(0), + Range { start: 0, end: 0 }, + ); + assert_eq!( + Range { start: 2, end: 4 }.limit_explicit_bounds(0), + Range { start: 0, end: 0 }, + ); + assert_eq!( + Range { start: 1, end: 20 }.limit_explicit_bounds(5), + Range { start: 1, end: 5 }, + ); + + assert_eq!( + range::Range { start: 0, end: 0 }.limit_explicit_bounds(0), + range::Range { start: 0, end: 0 }, + ); + assert_eq!( + range::Range { start: 0, end: 1 }.limit_explicit_bounds(0), + range::Range { start: 0, end: 0 }, + ); + assert_eq!( + range::Range { start: 2, end: 4 }.limit_explicit_bounds(0), + range::Range { start: 0, end: 0 }, + ); + assert_eq!( + range::Range { start: 1, end: 20 }.limit_explicit_bounds(5), + range::Range { start: 1, end: 5 }, + ); + + assert_eq!( + RangeInclusive::new(0, 0).limit_explicit_bounds(0), + Range { start: 0, end: 0 }, + ); + assert_eq!( + RangeInclusive::new(0, 1).limit_explicit_bounds(0), + Range { start: 0, end: 0 }, + ); + assert_eq!( + RangeInclusive::new(2, 4).limit_explicit_bounds(0), + Range { start: 0, end: 0 }, + ); + assert_eq!( + RangeInclusive::new(1, 20).limit_explicit_bounds(5), + Range { start: 1, end: 5 }, + ); + + assert_eq!( + range::RangeInclusive { start: 0, end: 0 }.limit_explicit_bounds(0), + range::Range { start: 0, end: 0 }, + ); + assert_eq!( + range::RangeInclusive { start: 0, end: 1 }.limit_explicit_bounds(0), + range::Range { start: 0, end: 0 }, + ); + assert_eq!( + range::RangeInclusive { start: 2, end: 4 }.limit_explicit_bounds(0), + range::Range { start: 0, end: 0 }, + ); + assert_eq!( + range::RangeInclusive { start: 1, end: 20 }.limit_explicit_bounds(5), + range::Range { start: 1, end: 5 }, + ); + + assert_eq!(RangeTo { end: 0 }.limit_explicit_bounds(0), RangeTo { end: 0 },); + assert_eq!(RangeTo { end: 1 }.limit_explicit_bounds(0), RangeTo { end: 0 },); + assert_eq!(RangeTo { end: 20 }.limit_explicit_bounds(5), RangeTo { end: 5 },); + + assert_eq!(RangeToInclusive { end: 0 }.limit_explicit_bounds(0), RangeTo { end: 0 },); + assert_eq!(RangeToInclusive { end: 1 }.limit_explicit_bounds(0), RangeTo { end: 0 },); + assert_eq!( + RangeToInclusive { end: 20 }.limit_explicit_bounds(5), + RangeTo { end: 5 }, + ); + + assert_eq!(RangeFrom { start: 0 }.limit_explicit_bounds(0), RangeFrom { start: 0 },); + assert_eq!(RangeFrom { start: 1 }.limit_explicit_bounds(0), RangeFrom { start: 0 },); + assert_eq!(RangeFrom { start: 20 }.limit_explicit_bounds(5), RangeFrom { start: 5 },); + + assert_eq!( + range::RangeFrom { start: 0 }.limit_explicit_bounds(0), + range::RangeFrom { start: 0 }, + ); + assert_eq!( + range::RangeFrom { start: 1 }.limit_explicit_bounds(0), + range::RangeFrom { start: 0 }, + ); + assert_eq!( + range::RangeFrom { start: 20 }.limit_explicit_bounds(5), + range::RangeFrom { start: 5 }, + ); +} + +#[test] +#[expect(clippy::too_many_lines)] +fn with_stride() { + 
assert_eq!(0.with_stride(1), Range { start: 0, end: 1 }); + assert_eq!(0.with_stride(2), Range { start: 0, end: 2 }); + assert_eq!(1.with_stride(1), Range { start: 1, end: 2 }); + assert_eq!(1.with_stride(2), Range { start: 2, end: 4 }); + assert_eq!(2.with_stride(4), Range { start: 8, end: 12 }); + + assert_eq!(Range { start: 0, end: 0 }.with_stride(1), Range { start: 0, end: 0 },); + assert_eq!(Range { start: 0, end: 1 }.with_stride(1), Range { start: 0, end: 1 },); + assert_eq!(Range { start: 2, end: 6 }.with_stride(1), Range { start: 2, end: 6 },); + assert_eq!(Range { start: 0, end: 0 }.with_stride(2), Range { start: 0, end: 0 },); + assert_eq!(Range { start: 0, end: 2 }.with_stride(2), Range { start: 0, end: 4 },); + assert_eq!(Range { start: 4, end: 10 }.with_stride(5), Range { start: 20, end: 50 },); + + assert_eq!( + range::Range { start: 0, end: 0 }.with_stride(1), + range::Range { start: 0, end: 0 }, + ); + assert_eq!( + range::Range { start: 0, end: 1 }.with_stride(1), + range::Range { start: 0, end: 1 }, + ); + assert_eq!( + range::Range { start: 2, end: 6 }.with_stride(1), + range::Range { start: 2, end: 6 }, + ); + assert_eq!( + range::Range { start: 0, end: 0 }.with_stride(2), + range::Range { start: 0, end: 0 }, + ); + assert_eq!( + range::Range { start: 0, end: 2 }.with_stride(2), + range::Range { start: 0, end: 4 }, + ); + assert_eq!( + range::Range { start: 4, end: 10 }.with_stride(5), + range::Range { start: 20, end: 50 }, + ); + + assert_eq!(RangeInclusive::new(0, 0).with_stride(1), Range { start: 0, end: 1 },); + assert_eq!(RangeInclusive::new(0, 1).with_stride(1), Range { start: 0, end: 2 },); + assert_eq!(RangeInclusive::new(2, 6).with_stride(1), Range { start: 2, end: 7 },); + assert_eq!(RangeInclusive::new(0, 0).with_stride(2), Range { start: 0, end: 2 },); + assert_eq!(RangeInclusive::new(0, 2).with_stride(2), Range { start: 0, end: 6 },); + assert_eq!(RangeInclusive::new(4, 10).with_stride(5), Range { start: 20, end: 55 },); + + assert_eq!( + range::RangeInclusive { start: 0, end: 0 }.with_stride(1), + range::Range { start: 0, end: 1 }, + ); + assert_eq!( + range::RangeInclusive { start: 0, end: 1 }.with_stride(1), + range::Range { start: 0, end: 2 }, + ); + assert_eq!( + range::RangeInclusive { start: 2, end: 6 }.with_stride(1), + range::Range { start: 2, end: 7 }, + ); + assert_eq!( + range::RangeInclusive { start: 0, end: 0 }.with_stride(2), + range::Range { start: 0, end: 2 }, + ); + assert_eq!( + range::RangeInclusive { start: 0, end: 2 }.with_stride(2), + range::Range { start: 0, end: 6 }, + ); + assert_eq!( + range::RangeInclusive { start: 4, end: 10 }.with_stride(5), + range::Range { start: 20, end: 55 }, + ); + + assert_eq!(RangeTo { end: 0 }.with_stride(1), RangeTo { end: 0 },); + assert_eq!(RangeTo { end: 1 }.with_stride(1), RangeTo { end: 1 },); + assert_eq!(RangeTo { end: 6 }.with_stride(1), RangeTo { end: 6 },); + assert_eq!(RangeTo { end: 0 }.with_stride(2), RangeTo { end: 0 },); + assert_eq!(RangeTo { end: 2 }.with_stride(2), RangeTo { end: 4 },); + assert_eq!(RangeTo { end: 10 }.with_stride(5), RangeTo { end: 50 },); + + assert_eq!(RangeToInclusive { end: 0 }.with_stride(1), RangeTo { end: 1 },); + assert_eq!(RangeToInclusive { end: 1 }.with_stride(1), RangeTo { end: 2 },); + assert_eq!(RangeToInclusive { end: 6 }.with_stride(1), RangeTo { end: 7 },); + assert_eq!(RangeToInclusive { end: 0 }.with_stride(2), RangeTo { end: 2 },); + assert_eq!(RangeToInclusive { end: 2 }.with_stride(2), RangeTo { end: 6 },); + assert_eq!(RangeToInclusive { end: 
10 }.with_stride(5), RangeTo { end: 55 },); + + assert_eq!(RangeFrom { start: 0 }.with_stride(1), RangeFrom { start: 0 },); + assert_eq!(RangeFrom { start: 1 }.with_stride(1), RangeFrom { start: 1 },); + assert_eq!(RangeFrom { start: 6 }.with_stride(1), RangeFrom { start: 6 },); + assert_eq!(RangeFrom { start: 0 }.with_stride(2), RangeFrom { start: 0 },); + assert_eq!(RangeFrom { start: 2 }.with_stride(2), RangeFrom { start: 4 },); + assert_eq!(RangeFrom { start: 10 }.with_stride(5), RangeFrom { start: 50 },); + + assert_eq!( + range::RangeFrom { start: 0 }.with_stride(1), + range::RangeFrom { start: 0 }, + ); + assert_eq!( + range::RangeFrom { start: 1 }.with_stride(1), + range::RangeFrom { start: 1 }, + ); + assert_eq!( + range::RangeFrom { start: 6 }.with_stride(1), + range::RangeFrom { start: 6 }, + ); + assert_eq!( + range::RangeFrom { start: 0 }.with_stride(2), + range::RangeFrom { start: 0 }, + ); + assert_eq!( + range::RangeFrom { start: 2 }.with_stride(2), + range::RangeFrom { start: 4 }, + ); + assert_eq!( + range::RangeFrom { start: 10 }.with_stride(5), + range::RangeFrom { start: 50 }, + ); +} + +#[test] +#[expect(clippy::too_many_lines)] +fn split_at() { + assert_eq!(0.split_at(0), (Range { start: 0, end: 0 }, 1)); + assert_eq!(0.split_at(1), (Range { start: 0, end: 1 }, 0)); + assert_eq!(1.split_at(0), (Range { start: 0, end: 0 }, 1)); + assert_eq!(1.split_at(1), (Range { start: 1, end: 1 }, 1)); + assert_eq!(5.split_at(20), (Range { start: 5, end: 6 }, 0)); + + assert_eq!(Range { start: 0, end: 0 }.split_at(0), (Range { start: 0, end: 0 }, 0),); + assert_eq!(Range { start: 0, end: 1 }.split_at(0), (Range { start: 0, end: 0 }, 1),); + assert_eq!(Range { start: 0, end: 0 }.split_at(1), (Range { start: 0, end: 0 }, 0),); + assert_eq!(Range { start: 0, end: 5 }.split_at(1), (Range { start: 0, end: 1 }, 4),); + assert_eq!(Range { start: 1, end: 1 }.split_at(0), (Range { start: 0, end: 0 }, 0),); + assert_eq!(Range { start: 1, end: 2 }.split_at(0), (Range { start: 0, end: 0 }, 1),); + assert_eq!(Range { start: 1, end: 1 }.split_at(1), (Range { start: 1, end: 1 }, 0),); + assert_eq!(Range { start: 1, end: 5 }.split_at(2), (Range { start: 1, end: 2 }, 3),); + assert_eq!( + Range { start: 20, end: 200 }.split_at(55), + (Range { start: 20, end: 55 }, 145), + ); + + assert_eq!( + range::Range { start: 0, end: 0 }.split_at(0), + (range::Range { start: 0, end: 0 }, 0), + ); + assert_eq!( + range::Range { start: 0, end: 1 }.split_at(0), + (range::Range { start: 0, end: 0 }, 1), + ); + assert_eq!( + range::Range { start: 0, end: 0 }.split_at(1), + (range::Range { start: 0, end: 0 }, 0), + ); + assert_eq!( + range::Range { start: 0, end: 5 }.split_at(1), + (range::Range { start: 0, end: 1 }, 4), + ); + assert_eq!( + range::Range { start: 1, end: 1 }.split_at(0), + (range::Range { start: 0, end: 0 }, 0), + ); + assert_eq!( + range::Range { start: 1, end: 2 }.split_at(0), + (range::Range { start: 0, end: 0 }, 1), + ); + assert_eq!( + range::Range { start: 1, end: 1 }.split_at(1), + (range::Range { start: 1, end: 1 }, 0), + ); + assert_eq!( + range::Range { start: 1, end: 5 }.split_at(2), + (range::Range { start: 1, end: 2 }, 3), + ); + assert_eq!( + range::Range { start: 20, end: 200 }.split_at(55), + (range::Range { start: 20, end: 55 }, 145), + ); + + assert_eq!(RangeInclusive::new(0, 0).split_at(0), (Range { start: 0, end: 0 }, 1),); + assert_eq!(RangeInclusive::new(0, 1).split_at(0), (Range { start: 0, end: 0 }, 2),); + assert_eq!(RangeInclusive::new(0, 0).split_at(1), (Range { start: 0, 
end: 1 }, 0),); + assert_eq!(RangeInclusive::new(0, 5).split_at(1), (Range { start: 0, end: 1 }, 5),); + assert_eq!(RangeInclusive::new(1, 1).split_at(0), (Range { start: 0, end: 0 }, 1),); + assert_eq!(RangeInclusive::new(1, 2).split_at(0), (Range { start: 0, end: 0 }, 2),); + assert_eq!(RangeInclusive::new(1, 1).split_at(1), (Range { start: 1, end: 1 }, 1),); + assert_eq!(RangeInclusive::new(1, 5).split_at(2), (Range { start: 1, end: 2 }, 4),); + assert_eq!( + RangeInclusive::new(20, 200).split_at(55), + (Range { start: 20, end: 55 }, 146), + ); + + assert_eq!( + range::RangeInclusive { start: 0, end: 0 }.split_at(0), + (range::Range { start: 0, end: 0 }, 1), + ); + assert_eq!( + range::RangeInclusive { start: 0, end: 1 }.split_at(0), + (range::Range { start: 0, end: 0 }, 2), + ); + assert_eq!( + range::RangeInclusive { start: 0, end: 0 }.split_at(1), + (range::Range { start: 0, end: 1 }, 0), + ); + assert_eq!( + range::RangeInclusive { start: 0, end: 5 }.split_at(1), + (range::Range { start: 0, end: 1 }, 5), + ); + assert_eq!( + range::RangeInclusive { start: 1, end: 1 }.split_at(0), + (range::Range { start: 0, end: 0 }, 1), + ); + assert_eq!( + range::RangeInclusive { start: 1, end: 2 }.split_at(0), + (range::Range { start: 0, end: 0 }, 2), + ); + assert_eq!( + range::RangeInclusive { start: 1, end: 1 }.split_at(1), + (range::Range { start: 1, end: 1 }, 1), + ); + assert_eq!( + range::RangeInclusive { start: 1, end: 5 }.split_at(2), + (range::Range { start: 1, end: 2 }, 4), + ); + assert_eq!( + range::RangeInclusive { start: 20, end: 200 }.split_at(55), + (range::Range { start: 20, end: 55 }, 146), + ); +} diff --git a/clippy_data_structures/src/slice_set.rs b/clippy_data_structures/src/slice_set.rs new file mode 100644 index 000000000000..5636b7735ba3 --- /dev/null +++ b/clippy_data_structures/src/slice_set.rs @@ -0,0 +1,195 @@ +use crate::sorted; +use crate::traits::SortedIndex; +use core::borrow::Borrow; +use core::mem::{MaybeUninit, transmute}; +use core::ops::Deref; +use core::{iter, slice}; +use rustc_arena::DroplessArena; + +/// A wrapper around a slice where all items are unique and sorted. +#[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Hash)] +#[repr(transparent)] +pub struct SliceSet { + data: [T], +} +impl SliceSet { + /// Gets an empty set. + #[inline] + #[must_use] + pub const fn empty<'a>() -> &'a Self { + Self::from_sorted_unchecked(&[]) + } + + /// Interprets the reference as a set containing a single item. + #[inline] + #[must_use] + pub const fn from_ref(value: &T) -> &Self { + Self::from_sorted_unchecked(slice::from_ref(value)) + } + + /// Same as `from_sorted`, but without debug assertions. + #[inline] + pub(crate) const fn from_sorted_unchecked(slice: &[T]) -> &Self { + // SAFETY: `SliceSet`` is a transparent wrapper around `T`. + unsafe { transmute::<&[T], &SliceSet>(slice) } + } + + /// Gets the current set as a regular slice. + #[inline] + #[must_use] + pub const fn as_raw_slice(&self) -> &[T] { + &self.data + } + + /// Checks if the set contains the given value. + #[inline] + #[must_use] + pub fn contains(&self, item: &Q) -> bool + where + T: Borrow, + Q: Ord + ?Sized, + { + self.data.binary_search_by(|x| x.borrow().cmp(item)).is_ok() + } + + /// Gets the specified item from the set. Returns `None` if it doesn't exist. 
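+    ///
+    /// A minimal usage sketch (illustrative values, assuming an `Ord` element type):
+    ///
+    /// ```ignore
+    /// let set = SliceSet::from_sorted(&[1, 2, 4, 8]);
+    /// assert_eq!(set.get(&4), Some(&4));
+    /// assert_eq!(set.get(&3), None);
+    /// ```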
+ #[inline] + #[must_use] + pub fn get(&self, item: &Q) -> Option<&T> + where + T: Borrow, + Q: Ord + ?Sized, + { + self.data + .binary_search_by(|x| x.borrow().cmp(item)) + .ok() + .map(|i| &self.data[i]) + } + + /// Gets the index of the specified item in the set. Returns `None` if it doesn't exist. + #[inline] + #[must_use] + pub fn get_index(&self, item: &Q) -> Option + where + T: Borrow, + Q: Ord + ?Sized, + { + self.data.binary_search_by(|x| x.borrow().cmp(item)).ok() + } + + /// Gets a subset of the current set. + #[inline] + #[must_use] + pub fn get_range(&self, range: impl SortedIndex) -> &Self + where + T: Borrow, + Q: Ord + ?Sized, + { + Self::from_sorted_unchecked( + &self.data[range.find_range(&self.data, |slice, target| { + slice.binary_search_by(|x| x.borrow().cmp(target)) + })], + ) + } +} +impl SliceSet { + /// Assumes the given slice is sorted with no duplicates. + /// + /// Will panic with debug assertions enabled if the given slice is unsorted or contains + /// duplicates. + #[inline] + #[must_use] + pub fn from_sorted(slice: &[T]) -> &Self { + debug_assert!(sorted::is_slice_set(slice)); + Self::from_sorted_unchecked(slice) + } + + /// Sorts the given slice and assumes no duplicates. + /// + /// Will panic with debug assertions enabled if the given slice contains duplicates. + #[inline] + #[must_use] + pub fn from_unsorted_slice(slice: &mut [T]) -> &Self { + slice.sort_unstable(); + Self::from_sorted(slice) + } + + /// Sorts and partitions out duplicates from the given slice. + #[inline] + #[must_use] + pub fn from_unsorted_slice_dedup(slice: &mut [T]) -> &Self { + slice.sort_unstable(); + Self::from_sorted_unchecked(slice.partition_dedup().0) + } + + /// Checks if this set is a subset of another. + #[inline] + #[must_use] + pub fn is_subset_of(&self, other: &Self) -> bool { + if self.len() > other.len() { + return false; + } + if sorted::should_binary_search(other.len(), self.len()) { + sorted::is_subset_of_binary(self, other) + } else { + sorted::is_subset_of_linear(self, other) + } + } + + /// Checks if this set is a superset of another. + #[inline] + #[must_use] + pub fn is_superset_of(&self, other: &Self) -> bool { + other.is_subset_of(self) + } +} +impl SliceSet { + /// Creates a new set allocated into an arena which is the union of two sorted lists. + /// + /// # Panics + /// Panics if either iterator returns more than their `len` functions indicate. 
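+    ///
+    /// A minimal sketch of the intended usage, assuming both inputs are already sorted
+    /// (the arena and element values here are illustrative):
+    ///
+    /// ```ignore
+    /// let arena = DroplessArena::default();
+    /// let xs = [1u32, 3, 5];
+    /// let ys = [2u32, 3, 6];
+    /// let set = SliceSet::from_sorted_union_into_arena(&arena, xs.iter().copied(), ys.iter().copied());
+    /// assert_eq!(set.as_raw_slice(), &[1, 2, 3, 5, 6]);
+    /// ```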
+ #[inline] + #[must_use] + pub fn from_sorted_union_into_arena( + arena: &DroplessArena, + xs: impl IntoIterator, + ys: impl IntoIterator, + ) -> &Self { + let xs = xs.into_iter(); + let ys = ys.into_iter(); + let len = xs.len().checked_add(ys.len()).unwrap(); + if len == 0 { + Self::empty() + } else { + Self::from_sorted(sorted::union_fill_uninit( + arena.alloc_from_iter(iter::repeat_with(|| MaybeUninit::uninit()).take(len)), + xs, + ys, + Ord::cmp, + )) + } + } +} + +impl Deref for SliceSet { + type Target = [T]; + #[inline] + fn deref(&self) -> &Self::Target { + &self.data + } +} +impl Borrow<[T]> for SliceSet { + #[inline] + fn borrow(&self) -> &[T] { + &self.data + } +} + +impl<'a, T> IntoIterator for &'a SliceSet { + type Item = &'a T; + type IntoIter = slice::Iter<'a, T>; + #[inline] + fn into_iter(self) -> Self::IntoIter { + self.data.iter() + } +} diff --git a/clippy_data_structures/src/sorted.rs b/clippy_data_structures/src/sorted.rs new file mode 100644 index 000000000000..c5166e02fb3d --- /dev/null +++ b/clippy_data_structures/src/sorted.rs @@ -0,0 +1,106 @@ +use core::cmp::Ordering; +use core::mem::MaybeUninit; + +/// Determines whether a binary or linear search should be used when searching for `search_count` +/// sorted items in a sorted list of the given size. +#[inline] +pub fn should_binary_search(list_size: usize, search_count: usize) -> bool { + // Using binary search has a complexity of `O(log2(list_size) * search_count)` with an average + // case only slightly better. This roughly calculates if the binary search will be faster, + // erring on the side of a linear search. + + // This is essentially `search_count < list_size / list_size.ilog2().next_power_of_two() / 2`, + // but with better codegen. + let log2 = (usize::BITS - 1).wrapping_sub(list_size.leading_zeros()); + // If `log2` is `MAX` then `list_size` is zero. Shifting by the maximum amount is fine. + // If `log2` is 64 then `list_size` is one. Shifting by zero is fine. + // In all other cases `log2` will be in the `0..BITS` range. + search_count < list_size.wrapping_shr(usize::BITS - log2.leading_zeros()) +} + +/// Merges the two sorted lists into `dst`, discarding any duplicates between the two. +/// +/// # Panics +/// Panics if `dst` is too small to contain the merged list. +pub fn union_fill_uninit( + dst: &mut [MaybeUninit], + mut xs: impl Iterator, + mut ys: impl Iterator, + mut cmp: impl FnMut(&T, &T) -> Ordering, +) -> &mut [T] { + // n.b. `dst_iter` must be moved exactly once for each item written. + let mut dst_iter = dst.iter_mut(); + let mut next_x = xs.next(); + let mut next_y = ys.next(); + loop { + match (next_x, next_y) { + (Some(x), Some(y)) => match cmp(&x, &y) { + Ordering::Equal => { + dst_iter.next().unwrap().write(x); + next_x = xs.next(); + next_y = ys.next(); + }, + Ordering::Less => { + dst_iter.next().unwrap().write(x); + next_x = xs.next(); + next_y = Some(y); + }, + Ordering::Greater => { + dst_iter.next().unwrap().write(y); + next_x = Some(x); + next_y = ys.next(); + }, + }, + (Some(x), None) => { + dst_iter.next().unwrap().write(x); + xs.for_each(|x| { + dst_iter.next().unwrap().write(x); + }); + break; + }, + (None, Some(y)) => { + dst_iter.next().unwrap().write(y); + ys.for_each(|y| { + dst_iter.next().unwrap().write(y); + }); + break; + }, + (None, None) => break, + } + } + + let remain = dst_iter.into_slice().len(); + let end = dst.len() - remain; + // Safety: Every item returned by `dst_iter` was written to. 
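+    // `dst_iter` was advanced exactly once per written element, so `end = dst.len() - remain` is
+    // the number of initialized elements at the front of `dst`.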
+ unsafe { dst[..end].assume_init_mut() } +} + +pub fn is_subset_of_linear(xs: &[T], ys: &[T]) -> bool { + let mut ys = ys.iter(); + 'outer: for x in xs { + for y in &mut ys { + match x.cmp(y) { + Ordering::Equal => continue 'outer, + Ordering::Less => return false, + Ordering::Greater => {}, + } + } + return false; + } + true +} + +pub fn is_subset_of_binary(xs: &[T], mut ys: &[T]) -> bool { + for x in xs { + match ys.binary_search(x) { + Ok(i) => ys = &ys[i + 1..], + Err(_) => return false, + } + } + true +} + +/// Checks is a slice is ordered with no duplicates. +pub fn is_slice_set(slice: &[T]) -> bool { + slice.array_windows::<2>().all(|[x, y]| x.cmp(y).is_lt()) +} diff --git a/clippy_data_structures/src/traits.rs b/clippy_data_structures/src/traits.rs new file mode 100644 index 000000000000..140ffc07e217 --- /dev/null +++ b/clippy_data_structures/src/traits.rs @@ -0,0 +1,92 @@ +use core::ops::{Range, RangeBounds, RangeFrom, RangeFull, RangeInclusive, RangeTo, RangeToInclusive}; +use core::range; +use core::slice::SliceIndex; + +/// A helper trait for getting a range of items from a sorted slice. +pub trait SortedIndex { + type Result: SliceIndex<[T], Output = [T]> + RangeBounds; + fn find_range(self, slice: &[T], find: impl FnMut(&[T], &Q) -> Result) -> Self::Result; +} +impl SortedIndex for RangeFull { + type Result = RangeFull; + fn find_range(self, _: &[T], _: impl FnMut(&[T], &Q) -> Result) -> Self::Result { + self + } +} +impl SortedIndex for Range<&Q> { + type Result = Range; + #[inline] + fn find_range(self, slice: &[T], mut find: impl FnMut(&[T], &Q) -> Result) -> Self::Result { + let (Ok(start) | Err(start)) = find(slice, self.start); + let (Ok(end) | Err(end)) = find(slice, self.end); + Range { start, end } + } +} +impl SortedIndex for range::Range<&Q> { + type Result = range::Range; + #[inline] + fn find_range(self, slice: &[T], mut find: impl FnMut(&[T], &Q) -> Result) -> Self::Result { + let (Ok(start) | Err(start)) = find(slice, self.start); + let (Ok(end) | Err(end)) = find(slice, self.end); + range::Range { start, end } + } +} +impl SortedIndex for RangeInclusive<&Q> { + type Result = RangeInclusive; + #[inline] + fn find_range(self, slice: &[T], mut find: impl FnMut(&[T], &Q) -> Result) -> Self::Result { + let (Ok(start) | Err(start)) = find(slice, *self.start()); + let end = match find(slice, *self.end()) { + Ok(i) => i + 1, + Err(i) => i, + }; + RangeInclusive::new(start, end) + } +} +impl SortedIndex for range::RangeInclusive<&Q> { + type Result = range::RangeInclusive; + #[inline] + fn find_range(self, slice: &[T], mut find: impl FnMut(&[T], &Q) -> Result) -> Self::Result { + let (Ok(start) | Err(start)) = find(slice, self.start); + let end = match find(slice, self.end) { + Ok(i) => i + 1, + Err(i) => i, + }; + range::RangeInclusive { start, end } + } +} +impl SortedIndex for RangeFrom<&Q> { + type Result = RangeFrom; + #[inline] + fn find_range(self, slice: &[T], mut find: impl FnMut(&[T], &Q) -> Result) -> Self::Result { + let (Ok(start) | Err(start)) = find(slice, self.start); + RangeFrom { start } + } +} +impl SortedIndex for range::RangeFrom<&Q> { + type Result = range::RangeFrom; + #[inline] + fn find_range(self, slice: &[T], mut find: impl FnMut(&[T], &Q) -> Result) -> Self::Result { + let (Ok(start) | Err(start)) = find(slice, self.start); + range::RangeFrom { start } + } +} +impl SortedIndex for RangeTo<&Q> { + type Result = RangeTo; + #[inline] + fn find_range(self, slice: &[T], mut find: impl FnMut(&[T], &Q) -> Result) -> Self::Result { + let (Ok(end) 
| Err(end)) = find(slice, self.end); + RangeTo { end } + } +} +impl SortedIndex for RangeToInclusive<&Q> { + type Result = RangeToInclusive; + #[inline] + fn find_range(self, slice: &[T], mut find: impl FnMut(&[T], &Q) -> Result) -> Self::Result { + let end = match find(slice, self.end) { + Ok(i) => i + 1, + Err(i) => i, + }; + RangeToInclusive { end } + } +} diff --git a/clippy_data_structures/tests/bit_slice.rs b/clippy_data_structures/tests/bit_slice.rs new file mode 100644 index 000000000000..ffa4ea686eca --- /dev/null +++ b/clippy_data_structures/tests/bit_slice.rs @@ -0,0 +1,120 @@ +#![feature(rustc_private)] +#![allow( + clippy::cast_possible_truncation, + clippy::unreadable_literal, + clippy::range_minus_one +)] + +use clippy_data_structures::bit_slice::{BitSlice, WORD_BITS}; + +#[test] +fn union_intersect_subtract() { + let mut x = [0b10101010, 0b10101010]; + let mut y = [0b01010101, 0b11110000]; + let x = BitSlice::::from_words_mut(&mut x); + let y = BitSlice::::from_words_mut(&mut y); + + assert!(x.union_trunc(y)); + assert_eq!(&x.words, &[0b11111111, 0b11111010]); + assert!(!x.union_trunc(y)); + assert_eq!(&x.words, &[0b11111111, 0b11111010]); + assert!(x.subtract(y)); + assert_eq!(&x.words, &[0b10101010, 0b00001010]); + assert!(!x.subtract(y)); + assert_eq!(&x.words, &[0b10101010, 0b00001010]); + + assert!(x.union_trunc(y)); + assert_eq!(&x.words, &[0b11111111, 0b11111010]); + assert!(x.intersect(y)); + assert_eq!(&x.words, &[0b01010101, 0b11110000]); + assert!(!x.intersect(y)); + assert_eq!(&x.words, &[0b01010101, 0b11110000]); + + x.clear(); + assert!(x.is_empty()); + assert!(!x.subtract(y)); + assert!(!x.intersect(y)); + assert!(x.union_trunc(y)); + assert!(x.words == y.words); +} + +#[test] +fn insert_range() { + let mut x = [0, 0, 0]; + let x = BitSlice::::from_words_mut(&mut x); + + x.insert_range(0..WORD_BITS); + assert_eq!(&x.words, &[!0, 0, 0]); + + x.insert_range(1..=WORD_BITS); + assert_eq!(&x.words, &[!0, 1, 0]); + + x.insert_range(..WORD_BITS + 3); + assert_eq!(&x.words, &[!0, 0b111, 0]); + + x.insert_range(..=WORD_BITS * 2 - 1); + assert_eq!(&x.words, &[!0, !0, 0]); + + x.insert_range(WORD_BITS * 2 + 1..); + assert_eq!(&x.words, &[!0, !0, !1]); + + x.clear(); + x.insert_range(WORD_BITS / 2..WORD_BITS * 2 + WORD_BITS / 2); + assert_eq!(&x.words, &[!0 << (WORD_BITS / 2), !0, !0 >> (WORD_BITS / 2)]); + + x.clear(); + x.insert_range(0..0); + assert_eq!(&x.words, &[0, 0, 0]); + + x.insert_range(1..WORD_BITS - 1); + assert_eq!(&x.words, &[!1 & (!0 >> 1), 0, 0]); +} + +#[test] +fn iter_insert_remove_contains() { + let mut x = [0, 0, 0]; + let x = BitSlice::::from_words_mut(&mut x); + + assert!(x.iter().eq::<[usize; 0]>([])); + + assert!(!x.contains(1)); + assert!(x.insert(1)); + assert!(x.contains(1)); + assert!(x.iter().eq([1])); + + assert!(!x.contains(2)); + assert!(x.insert(2)); + assert!(x.contains(2)); + assert!(x.iter().eq([1, 2])); + + assert!(!x.contains(0)); + assert!(x.insert(0)); + assert!(x.contains(0)); + assert!(x.iter().eq([0, 1, 2])); + + assert!(!x.contains(WORD_BITS)); + assert!(x.insert(WORD_BITS)); + assert!(x.contains(WORD_BITS)); + assert!(x.iter().eq([0, 1, 2, WORD_BITS])); + + assert!(!x.contains(WORD_BITS * 2 + 1)); + assert!(x.insert(WORD_BITS * 2 + 1)); + assert!(x.contains(WORD_BITS * 2 + 1)); + assert!(x.iter().eq([0, 1, 2, WORD_BITS, WORD_BITS * 2 + 1])); + + assert!(!x.insert(0)); + assert!(x.iter().eq([0, 1, 2, WORD_BITS, WORD_BITS * 2 + 1])); + + assert!(x.remove(0)); + assert!(!x.contains(0)); + assert!(x.iter().eq([1, 2, WORD_BITS, 
WORD_BITS * 2 + 1])); + + assert!(!x.remove(0)); + assert!(x.iter().eq([1, 2, WORD_BITS, WORD_BITS * 2 + 1])); + + assert!(!x.contains(WORD_BITS * 2)); + assert!(x.insert(WORD_BITS * 2)); + assert!(x.contains(WORD_BITS * 2)); + assert!(x.drain().eq([1, 2, WORD_BITS, WORD_BITS * 2, WORD_BITS * 2 + 1])); + assert!(x.is_empty()); +} diff --git a/clippy_data_structures/tests/bit_slice_2d.rs b/clippy_data_structures/tests/bit_slice_2d.rs new file mode 100644 index 000000000000..a827285d1f68 --- /dev/null +++ b/clippy_data_structures/tests/bit_slice_2d.rs @@ -0,0 +1,221 @@ +#![feature(rustc_private)] +#![allow(clippy::too_many_lines, clippy::cast_possible_truncation)] + +use clippy_data_structures::BitSlice2d; +use clippy_data_structures::bit_slice::WORD_BITS; + +#[test] +#[rustfmt::skip] +fn row_iter_copy_move_3x3() { + let mut x = [ + 0, 0, 0, + 0b1010, 0b1010, 0b1010, + 0b0101, 0b0101, 0b0101, + ]; + let mut x = BitSlice2d::<'_, usize, usize>::from_mut_words(&mut x, 3, 3 * WORD_BITS as u32); + + assert!(x.iter_rows(..).map(|x| &x.words).eq([ + [0, 0, 0].as_slice(), + [0b1010, 0b1010, 0b1010].as_slice(), + [0b0101, 0b0101, 0b0101].as_slice(), + ])); + assert!(x.iter_rows(0).map(|x| &x.words).eq([ + [0, 0, 0].as_slice(), + ])); + assert!(x.iter_rows(1).map(|x| &x.words).eq([ + [0b1010, 0b1010, 0b1010].as_slice(), + ])); + assert!(x.iter_rows(..2).map(|x| &x.words).eq([ + [0, 0, 0].as_slice(), + [0b1010, 0b1010, 0b1010].as_slice(), + ])); + assert!(x.iter_rows(1..).map(|x| &x.words).eq([ + [0b1010, 0b1010, 0b1010].as_slice(), + [0b0101, 0b0101, 0b0101].as_slice(), + ])); + assert!(x.iter_rows(1..2).map(|x| &x.words).eq([ + [0b1010, 0b1010, 0b1010].as_slice(), + ])); + + x.copy_rows(0..1, 2); + assert_eq!( + x.words(), + [ + 0, 0, 0, + 0b1010, 0b1010, 0b1010, + 0, 0, 0, + ], + ); + x.copy_rows(1, 1); + assert_eq!( + x.words(), + [ + 0, 0, 0, + 0b1010, 0b1010, 0b1010, + 0, 0, 0, + ],); + x.copy_rows(1.., 0); + assert_eq!( + x.words(), + [ + 0b1010, 0b1010, 0b1010, + 0, 0, 0, + 0, 0, 0, + ],); + + x.move_rows(0, 1); + assert_eq!( + x.words(), + [ + 0, 0, 0, + 0b1010, 0b1010, 0b1010, + 0, 0, 0, + ],); + x.move_rows(1..3, 0); + assert_eq!( + x.words(), + [ + 0b1010, 0b1010, 0b1010, + 0, 0, 0, + 0, 0, 0, + ],); + x.move_rows(..2, 1); + assert_eq!( + x.words(), + [ + 0, 0, 0, + 0b1010, 0b1010, 0b1010, + 0, 0, 0, + ], + ); +} + +#[test] +#[rustfmt::skip] +fn row_iter_copy_move_4x5() { + let mut x = [ + 0, 0, 0, 0, + 0b1010, 0b1010, 0b1010, 0b1010, + 0b0101, 0b0101, 0b0101, 0b0101, + 0b1111, 0b1111, 0b1111, 0b1111, + 0b1001, 0b1001, 0b1001, 0b1001, + ]; + let mut x = BitSlice2d::<'_, usize, usize>::from_mut_words(&mut x, 5, 4 * WORD_BITS as u32 - 1); + + assert!(x.iter_rows(..).map(|x| &x.words).eq([ + [0, 0, 0, 0].as_slice(), + [0b1010, 0b1010, 0b1010, 0b1010].as_slice(), + [0b0101, 0b0101, 0b0101, 0b0101].as_slice(), + [0b1111, 0b1111, 0b1111, 0b1111].as_slice(), + [0b1001, 0b1001, 0b1001, 0b1001].as_slice(), + ])); + assert!(x.iter_rows(0).map(|x| &x.words).eq([ + [0, 0, 0, 0].as_slice(), + ])); + assert!(x.iter_rows(1).map(|x| &x.words).eq([ + [0b1010, 0b1010, 0b1010, 0b1010].as_slice(), + ])); + assert!(x.iter_rows(..2).map(|x| &x.words).eq([ + [0, 0, 0, 0].as_slice(), + [0b1010, 0b1010, 0b1010, 0b1010].as_slice(), + ])); + assert!(x.iter_rows(1..).map(|x| &x.words).eq([ + [0b1010, 0b1010, 0b1010, 0b1010].as_slice(), + [0b0101, 0b0101, 0b0101, 0b0101].as_slice(), + [0b1111, 0b1111, 0b1111, 0b1111].as_slice(), + [0b1001, 0b1001, 0b1001, 0b1001].as_slice(), + ])); + assert!(x.iter_rows(1..2).map(|x| 
&x.words).eq([ + [0b1010, 0b1010, 0b1010, 0b1010].as_slice(), + ])); + + x.copy_rows(0..1, 2); + assert_eq!( + x.words(), + [ + 0, 0, 0, 0, + 0b1010, 0b1010, 0b1010, 0b1010, + 0, 0, 0, 0, + 0b1111, 0b1111, 0b1111, 0b1111, + 0b1001, 0b1001, 0b1001, 0b1001, + ] + ); + x.copy_rows(1, 1); + assert_eq!( + x.words(), + [ + 0, 0, 0, 0, + 0b1010, 0b1010, 0b1010, 0b1010, + 0, 0, 0, 0, + 0b1111, 0b1111, 0b1111, 0b1111, + 0b1001, 0b1001, 0b1001, 0b1001, + ] + ); + x.copy_rows(1.., 0); + assert_eq!( + x.words(), + [ + 0b1010, 0b1010, 0b1010, 0b1010, + 0, 0, 0, 0, + 0b1111, 0b1111, 0b1111, 0b1111, + 0b1001, 0b1001, 0b1001, 0b1001, + 0b1001, 0b1001, 0b1001, 0b1001, + ] + ); + + x.move_rows(0, 1); + assert_eq!( + x.words(), + [ + 0, 0, 0, 0, + 0b1010, 0b1010, 0b1010, 0b1010, + 0b1111, 0b1111, 0b1111, 0b1111, + 0b1001, 0b1001, 0b1001, 0b1001, + 0b1001, 0b1001, 0b1001, 0b1001, + ] + ); + x.move_rows(1..3, 0); + assert_eq!( + x.words(), + [ + 0b1010, 0b1010, 0b1010, 0b1010, + 0b1111, 0b1111, 0b1111, 0b1111, + 0, 0, 0, 0, + 0b1001, 0b1001, 0b1001, 0b1001, + 0b1001, 0b1001, 0b1001, 0b1001, + ] + ); + x.move_rows(..2, 1); + assert_eq!( + x.words(), + [ + 0, 0, 0, 0, + 0b1010, 0b1010, 0b1010, 0b1010, + 0b1111, 0b1111, 0b1111, 0b1111, + 0b1001, 0b1001, 0b1001, 0b1001, + 0b1001, 0b1001, 0b1001, 0b1001, + ] + ); + x.move_rows(1..4, 0); + assert_eq!( + x.words(), + [ + 0b1010, 0b1010, 0b1010, 0b1010, + 0b1111, 0b1111, 0b1111, 0b1111, + 0b1001, 0b1001, 0b1001, 0b1001, + 0, 0, 0, 0, + 0b1001, 0b1001, 0b1001, 0b1001, + ] + ); + x.move_rows(2..=4, 1); + assert_eq!( + x.words(), + [ + 0b1010, 0b1010, 0b1010, 0b1010, + 0b1001, 0b1001, 0b1001, 0b1001, + 0, 0, 0, 0, + 0b1001, 0b1001, 0b1001, 0b1001, + 0, 0, 0, 0, + ] + ); +} diff --git a/clippy_data_structures/tests/growable_bit_set_2d.rs b/clippy_data_structures/tests/growable_bit_set_2d.rs new file mode 100644 index 000000000000..dab657252829 --- /dev/null +++ b/clippy_data_structures/tests/growable_bit_set_2d.rs @@ -0,0 +1,490 @@ +#![feature(rustc_private)] +#![allow(clippy::too_many_lines, clippy::cast_possible_truncation)] + +use clippy_data_structures::GrowableBitSet2d; +use clippy_data_structures::bit_slice::WORD_BITS; + +#[test] +#[rustfmt::skip] +fn row_iter_copy_move_3x3() { + let mut x = GrowableBitSet2d::::new(3 * WORD_BITS as u32); + x.ensure_row(0).words.copy_from_slice(&[0, 0, 0]); + x.ensure_row(1).words.copy_from_slice(&[0b1010, 0b1010, 0b1010]); + x.ensure_row(2).words.copy_from_slice(&[0b0101, 0b0101, 0b0101]); + + assert!(x.iter_rows(..).map(|x| &x.words).eq([ + [0, 0, 0].as_slice(), + [0b1010, 0b1010, 0b1010].as_slice(), + [0b0101, 0b0101, 0b0101].as_slice(), + ])); + assert!(x.iter_rows(0).map(|x| &x.words).eq([ + [0, 0, 0].as_slice(), + ])); + assert!(x.iter_rows(1).map(|x| &x.words).eq([ + [0b1010, 0b1010, 0b1010].as_slice(), + ])); + assert!(x.iter_rows(..2).map(|x| &x.words).eq([ + [0, 0, 0].as_slice(), + [0b1010, 0b1010, 0b1010].as_slice(), + ])); + assert!(x.iter_rows(1..).map(|x| &x.words).eq([ + [0b1010, 0b1010, 0b1010].as_slice(), + [0b0101, 0b0101, 0b0101].as_slice(), + ])); + assert!(x.iter_rows(1..2).map(|x| &x.words).eq([ + [0b1010, 0b1010, 0b1010].as_slice(), + ])); + + x.copy_rows(0..1, 2); + assert_eq!( + x.words(), + [ + 0, 0, 0, + 0b1010, 0b1010, 0b1010, + 0, 0, 0, + ], + ); + x.copy_rows(1, 1); + assert_eq!( + x.words(), + [ + 0, 0, 0, + 0b1010, 0b1010, 0b1010, + 0, 0, 0, + ],); + x.copy_rows(1..3, 0); + assert_eq!( + x.words(), + [ + 0b1010, 0b1010, 0b1010, + 0, 0, 0, + 0, 0, 0, + ],); + + x.move_rows(0, 1); + assert_eq!( + 
x.words(), + [ + 0, 0, 0, + 0b1010, 0b1010, 0b1010, + 0, 0, 0, + ],); + x.move_rows(1..3, 0); + assert_eq!( + x.words(), + [ + 0b1010, 0b1010, 0b1010, + 0, 0, 0, + 0, 0, 0, + ],); + x.move_rows(..2, 1); + assert_eq!( + x.words(), + [ + 0, 0, 0, + 0b1010, 0b1010, 0b1010, + 0, 0, 0, + ], + ); +} + +#[test] +#[rustfmt::skip] +fn row_iter_copy_move_4x5() { + let mut x = GrowableBitSet2d::::new(4 * WORD_BITS as u32 - 1); + x.ensure_row(0).words.copy_from_slice(&[0, 0, 0, 0]); + x.ensure_row(1).words.copy_from_slice(&[0b1010, 0b1010, 0b1010, 0b1010]); + x.ensure_row(2).words.copy_from_slice(&[0b0101, 0b0101, 0b0101, 0b0101]); + x.ensure_row(3).words.copy_from_slice(&[0b1111, 0b1111, 0b1111, 0b1111]); + x.ensure_row(4).words.copy_from_slice(&[0b1001, 0b1001, 0b1001, 0b1001]); + + assert!(x.iter_rows(..).map(|x| &x.words).eq([ + [0, 0, 0, 0].as_slice(), + [0b1010, 0b1010, 0b1010, 0b1010].as_slice(), + [0b0101, 0b0101, 0b0101, 0b0101].as_slice(), + [0b1111, 0b1111, 0b1111, 0b1111].as_slice(), + [0b1001, 0b1001, 0b1001, 0b1001].as_slice(), + ])); + assert!(x.iter_rows(0).map(|x| &x.words).eq([ + [0, 0, 0, 0].as_slice(), + ])); + assert!(x.iter_rows(1).map(|x| &x.words).eq([ + [0b1010, 0b1010, 0b1010, 0b1010].as_slice(), + ])); + assert!(x.iter_rows(..2).map(|x| &x.words).eq([ + [0, 0, 0, 0].as_slice(), + [0b1010, 0b1010, 0b1010, 0b1010].as_slice(), + ])); + assert!(x.iter_rows(1..).map(|x| &x.words).eq([ + [0b1010, 0b1010, 0b1010, 0b1010].as_slice(), + [0b0101, 0b0101, 0b0101, 0b0101].as_slice(), + [0b1111, 0b1111, 0b1111, 0b1111].as_slice(), + [0b1001, 0b1001, 0b1001, 0b1001].as_slice(), + ])); + assert!(x.iter_rows(1..2).map(|x| &x.words).eq([ + [0b1010, 0b1010, 0b1010, 0b1010].as_slice(), + ])); + + x.copy_rows(0..1, 2); + assert_eq!( + x.words(), + [ + 0, 0, 0, 0, + 0b1010, 0b1010, 0b1010, 0b1010, + 0, 0, 0, 0, + 0b1111, 0b1111, 0b1111, 0b1111, + 0b1001, 0b1001, 0b1001, 0b1001, + ] + ); + x.copy_rows(1, 1); + assert_eq!( + x.words(), + [ + 0, 0, 0, 0, + 0b1010, 0b1010, 0b1010, 0b1010, + 0, 0, 0, 0, + 0b1111, 0b1111, 0b1111, 0b1111, + 0b1001, 0b1001, 0b1001, 0b1001, + ] + ); + x.copy_rows(1..5, 0); + assert_eq!( + x.words(), + [ + 0b1010, 0b1010, 0b1010, 0b1010, + 0, 0, 0, 0, + 0b1111, 0b1111, 0b1111, 0b1111, + 0b1001, 0b1001, 0b1001, 0b1001, + 0b1001, 0b1001, 0b1001, 0b1001, + ] + ); + + x.move_rows(0, 1); + assert_eq!( + x.words(), + [ + 0, 0, 0, 0, + 0b1010, 0b1010, 0b1010, 0b1010, + 0b1111, 0b1111, 0b1111, 0b1111, + 0b1001, 0b1001, 0b1001, 0b1001, + 0b1001, 0b1001, 0b1001, 0b1001, + ] + ); + x.move_rows(1..3, 0); + assert_eq!( + x.words(), + [ + 0b1010, 0b1010, 0b1010, 0b1010, + 0b1111, 0b1111, 0b1111, 0b1111, + 0, 0, 0, 0, + 0b1001, 0b1001, 0b1001, 0b1001, + 0b1001, 0b1001, 0b1001, 0b1001, + ] + ); + x.move_rows(..2, 1); + assert_eq!( + x.words(), + [ + 0, 0, 0, 0, + 0b1010, 0b1010, 0b1010, 0b1010, + 0b1111, 0b1111, 0b1111, 0b1111, + 0b1001, 0b1001, 0b1001, 0b1001, + 0b1001, 0b1001, 0b1001, 0b1001, + ] + ); + x.move_rows(1..4, 0); + assert_eq!( + x.words(), + [ + 0b1010, 0b1010, 0b1010, 0b1010, + 0b1111, 0b1111, 0b1111, 0b1111, + 0b1001, 0b1001, 0b1001, 0b1001, + 0, 0, 0, 0, + 0b1001, 0b1001, 0b1001, 0b1001, + ] + ); + x.move_rows(2..=4, 1); + assert_eq!( + x.words(), + [ + 0b1010, 0b1010, 0b1010, 0b1010, + 0b1001, 0b1001, 0b1001, 0b1001, + 0, 0, 0, 0, + 0b1001, 0b1001, 0b1001, 0b1001, + 0, 0, 0, 0, + ] + ); +} + +#[test] +#[rustfmt::skip] +fn row_copy_oob() { + let mut x = GrowableBitSet2d::::new(3 * WORD_BITS as u32 - 2); + x.ensure_row(0).words.copy_from_slice(&[0, 0, 0]); + 
x.ensure_row(1).words.copy_from_slice(&[0b1010, 0b1010, 0b1010]); + x.ensure_row(2).words.copy_from_slice(&[0b0101, 0b0101, 0b0101]); + x.ensure_row(3).words.copy_from_slice(&[0b1111, 0b1111, 0b1111]); + + x.copy_rows(0, 4); + assert_eq!( + x.words(), + [ + 0, 0, 0, + 0b1010, 0b1010, 0b1010, + 0b0101, 0b0101, 0b0101, + 0b1111, 0b1111, 0b1111, + 0, 0, 0, + ] + ); + x.copy_rows(1, 5); + assert_eq!( + x.words(), + [ + 0, 0, 0, + 0b1010, 0b1010, 0b1010, + 0b0101, 0b0101, 0b0101, + 0b1111, 0b1111, 0b1111, + 0, 0, 0, + 0b1010, 0b1010, 0b1010, + ] + ); + x.copy_rows(..3, 7); + assert_eq!( + x.words(), + [ + 0, 0, 0, + 0b1010, 0b1010, 0b1010, + 0b0101, 0b0101, 0b0101, + 0b1111, 0b1111, 0b1111, + 0, 0, 0, + 0b1010, 0b1010, 0b1010, + 0, 0, 0, + 0, 0, 0, + 0b1010, 0b1010, 0b1010, + 0b0101, 0b0101, 0b0101, + ] + ); + x.copy_rows(1..3, 9); + assert_eq!( + x.words(), + [ + 0, 0, 0, + 0b1010, 0b1010, 0b1010, + 0b0101, 0b0101, 0b0101, + 0b1111, 0b1111, 0b1111, + 0, 0, 0, + 0b1010, 0b1010, 0b1010, + 0, 0, 0, + 0, 0, 0, + 0b1010, 0b1010, 0b1010, + 0b1010, 0b1010, 0b1010, + 0b0101, 0b0101, 0b0101, + ] + ); + x.copy_rows(11..15, 0); + assert_eq!( + x.words(), + [ + 0, 0, 0, + 0, 0, 0, + 0, 0, 0, + 0, 0, 0, + 0, 0, 0, + 0b1010, 0b1010, 0b1010, + 0, 0, 0, + 0, 0, 0, + 0b1010, 0b1010, 0b1010, + 0b1010, 0b1010, 0b1010, + 0b0101, 0b0101, 0b0101, + ] + ); + x.copy_rows(9..12, 3); + assert_eq!( + x.words(), + [ + 0, 0, 0, + 0, 0, 0, + 0, 0, 0, + 0b1010, 0b1010, 0b1010, + 0b0101, 0b0101, 0b0101, + 0, 0, 0, + 0, 0, 0, + 0, 0, 0, + 0b1010, 0b1010, 0b1010, + 0b1010, 0b1010, 0b1010, + 0b0101, 0b0101, 0b0101, + ] + ); + x.copy_rows(20..30, 9); + assert_eq!( + x.words(), + [ + 0, 0, 0, + 0, 0, 0, + 0, 0, 0, + 0b1010, 0b1010, 0b1010, + 0b0101, 0b0101, 0b0101, + 0, 0, 0, + 0, 0, 0, + 0, 0, 0, + 0b1010, 0b1010, 0b1010, + 0, 0, 0, + 0, 0, 0, + ] + ); + x.copy_rows(20, 30); + assert_eq!( + x.words(), + [ + 0, 0, 0, + 0, 0, 0, + 0, 0, 0, + 0b1010, 0b1010, 0b1010, + 0b0101, 0b0101, 0b0101, + 0, 0, 0, + 0, 0, 0, + 0, 0, 0, + 0b1010, 0b1010, 0b1010, + 0, 0, 0, + 0, 0, 0, + ] + ); +} + +#[test] +#[rustfmt::skip] +fn row_move_oob() { + let mut x = GrowableBitSet2d::::new(3 * WORD_BITS as u32 - 2); + x.ensure_row(0).words.copy_from_slice(&[0, 0, 0]); + x.ensure_row(1).words.copy_from_slice(&[0b1010, 0b1010, 0b1010]); + x.ensure_row(2).words.copy_from_slice(&[0b0101, 0b0101, 0b0101]); + x.ensure_row(3).words.copy_from_slice(&[0b1111, 0b1111, 0b1111]); + + x.move_rows(0, 4); + assert_eq!( + x.words(), + [ + 0, 0, 0, + 0b1010, 0b1010, 0b1010, + 0b0101, 0b0101, 0b0101, + 0b1111, 0b1111, 0b1111, + 0, 0, 0, + ] + ); + x.move_rows(1, 5); + assert_eq!( + x.words(), + [ + 0, 0, 0, + 0, 0, 0, + 0b0101, 0b0101, 0b0101, + 0b1111, 0b1111, 0b1111, + 0, 0, 0, + 0b1010, 0b1010, 0b1010, + ] + ); + x.move_rows(..3, 7); + assert_eq!( + x.words(), + [ + 0, 0, 0, + 0, 0, 0, + 0, 0, 0, + 0b1111, 0b1111, 0b1111, + 0, 0, 0, + 0b1010, 0b1010, 0b1010, + 0, 0, 0, + 0, 0, 0, + 0, 0, 0, + 0b0101, 0b0101, 0b0101, + ] + ); + x.ensure_row(1).words.fill(1); + x.move_rows(1..3, 9); + assert_eq!( + x.words(), + [ + 0, 0, 0, + 0, 0, 0, + 0, 0, 0, + 0b1111, 0b1111, 0b1111, + 0, 0, 0, + 0b1010, 0b1010, 0b1010, + 0, 0, 0, + 0, 0, 0, + 0, 0, 0, + 1, 1, 1, + 0, 0, 0, + ] + ); + x.ensure_row(0).words.fill(0b10); + x.move_rows(11..15, 0); + assert_eq!( + x.words(), + [ + 0, 0, 0, + 0, 0, 0, + 0, 0, 0, + 0, 0, 0, + 0, 0, 0, + 0b1010, 0b1010, 0b1010, + 0, 0, 0, + 0, 0, 0, + 0, 0, 0, + 1, 1, 1, + 0, 0, 0, + ] + ); + x.ensure_row(10).words.fill(0b1111); + x.move_rows(9..12, 3); + 
assert_eq!( + x.words(), + [ + 0, 0, 0, + 0, 0, 0, + 0, 0, 0, + 1, 1, 1, + 0b1111, 0b1111, 0b1111, + 0, 0, 0, + 0, 0, 0, + 0, 0, 0, + 0, 0, 0, + 0, 0, 0, + 0, 0, 0, + ] + ); + x.move_rows(20..40, 4); + assert_eq!( + x.words(), + [ + 0, 0, 0, + 0, 0, 0, + 0, 0, 0, + 1, 1, 1, + 0, 0, 0, + 0, 0, 0, + 0, 0, 0, + 0, 0, 0, + 0, 0, 0, + 0, 0, 0, + 0, 0, 0, + ] + ); + x.move_rows(20, 30); + assert_eq!( + x.words(), + [ + 0, 0, 0, + 0, 0, 0, + 0, 0, 0, + 1, 1, 1, + 0, 0, 0, + 0, 0, 0, + 0, 0, 0, + 0, 0, 0, + 0, 0, 0, + 0, 0, 0, + 0, 0, 0, + ] + ); +} diff --git a/tests/dogfood.rs b/tests/dogfood.rs index 16a1a415102c..92ee08f6a582 100644 --- a/tests/dogfood.rs +++ b/tests/dogfood.rs @@ -35,6 +35,7 @@ fn dogfood() { for package in [ "./", + "clippy_data_structures", "clippy_dev", "clippy_lints_internal", "clippy_lints", From f5881d82a06f6c7c49e05fda47f1ec40fe76aeee Mon Sep 17 00:00:00 2001 From: Jason Newcomb Date: Wed, 23 Apr 2025 18:28:21 -0400 Subject: [PATCH 2/2] Add the MIR utils used by `redundant_clone` --- clippy_mir/Cargo.toml | 12 + clippy_mir/src/analysis.rs | 287 +++++++++++ clippy_mir/src/childless_projection.rs | 238 +++++++++ clippy_mir/src/lib.rs | 18 + clippy_mir/src/projection.rs | 675 +++++++++++++++++++++++++ clippy_mir/src/value_tracking.rs | 668 ++++++++++++++++++++++++ tests/dogfood.rs | 1 + 7 files changed, 1899 insertions(+) create mode 100644 clippy_mir/Cargo.toml create mode 100644 clippy_mir/src/analysis.rs create mode 100644 clippy_mir/src/childless_projection.rs create mode 100644 clippy_mir/src/lib.rs create mode 100644 clippy_mir/src/projection.rs create mode 100644 clippy_mir/src/value_tracking.rs diff --git a/clippy_mir/Cargo.toml b/clippy_mir/Cargo.toml new file mode 100644 index 000000000000..ec1903cfdce2 --- /dev/null +++ b/clippy_mir/Cargo.toml @@ -0,0 +1,12 @@ +[package] +name = "clippy_mir" +version = "0.0.1" +edition = "2021" + +[dependencies] +clippy_data_structures = { path = "../clippy_data_structures" } +indexmap = "2.0.0" + +[package.metadata.rust-analyzer] +# This package uses #[feature(rustc_private)] +rustc_private = true diff --git a/clippy_mir/src/analysis.rs b/clippy_mir/src/analysis.rs new file mode 100644 index 000000000000..82ff74704d08 --- /dev/null +++ b/clippy_mir/src/analysis.rs @@ -0,0 +1,287 @@ +//! A simple framework for running dataflow analyses on the basic block graphs of MIR bodies. +//! +//! The main entry point is `run_analysis` which requires a few things to be set up first. +//! +//! * A `BlockOrderMap`. This defines the order in which that analysis will check blocks. +//! * A `WorkQueue`. Used by the analysis to track which blocks still need to analyzed. +//! * The graph edge list. Used by the analysis to know which blocks to transfer the result of an +//! analyzed block to. +//! * An `Analysis` impl. This defines the state type, each block's state transformation function +//! and the transfer function. +//! +//! Dataflow analysis works by starting with each node in a directed graph (basic blocks in this +//! case) getting an initial state and a work queue that contains every node. For each node in the +//! queue a few steps will be taken: +//! +//! * The node will be removed from the queue. +//! * That node's transformation function will take the node's current state to produce a new state. +//! Note that this does not modify the current node's state, only computes a new one. +//! * For each immediate successor node a transfer function will modify the successor's state using +//! the previously computed state. +//! 
* Each successor node which had their state changed are added to the work queue if they are not +//! already there. +//! +//! Once there are no nodes left to take from the work queue the analysis is complete. + +use clippy_data_structures::{SliceSet, bit_slice}; +use core::cmp::minmax; +use core::iter; +use rustc_arena::DroplessArena; +use rustc_index::IndexSlice; +use rustc_middle::mir::{BasicBlock, Body, TerminatorKind, UnwindAction}; + +rustc_index::newtype_index! { + /// A reordered block index. + #[orderable] + pub struct OrderedBlock {} +} + +/// Bi-directional mapping to reorder blocks. +pub struct BlockOrderMap<'a> { + from_ordered: &'a IndexSlice, + to_ordered: &'a IndexSlice, +} +impl<'a> BlockOrderMap<'a> { + /// Creates a new mapping for a reverse postorder ordering. + pub fn new_reverse_postorder(arena: &'a DroplessArena, body: &'a Body<'_>) -> Self { + let from_ordered = IndexSlice::::from_raw(body.basic_blocks.reverse_postorder()); + let to_ordered = IndexSlice::::from_raw_mut( + arena.alloc_from_iter(iter::repeat_with(|| OrderedBlock::ZERO).take(from_ordered.len())), + ); + for (x, &y) in from_ordered.iter_enumerated() { + to_ordered[y] = x; + } + + Self { + from_ordered, + to_ordered, + } + } + + #[inline] + #[must_use] + pub fn to_ordered(&self) -> &'a IndexSlice { + self.to_ordered + } + + #[inline] + #[must_use] + #[expect(clippy::wrong_self_convention)] + pub fn from_ordered(&self) -> &'a IndexSlice { + self.from_ordered + } +} + +/// Queue that will remove blocks in order. +pub struct WorkQueue<'arena> { + queue: &'arena mut [bit_slice::Word], + word: bit_slice::Word, + offset: u32, + domain_size: u32, +} +impl<'arena> WorkQueue<'arena> { + /// Creates a new empty queue for the given body. + #[expect(clippy::cast_possible_truncation)] + pub fn new(arena: &'arena DroplessArena, body: &Body<'_>) -> Self { + Self { + queue: arena.alloc_from_iter(iter::repeat_n( + 0, + bit_slice::word_count_from_bits(body.basic_blocks.len()), + )), + word: 0, + offset: 0, + domain_size: body.basic_blocks.len() as u32, + } + } + + /// Fills the queue with all blocks. + fn fill(&mut self) { + self.queue.fill(!0); + if let Some(word) = self.queue.last_mut() { + *word &= bit_slice::final_mask_for_size(self.domain_size as usize); + } + self.offset = 0; + self.word = self.queue.first().copied().unwrap_or(0); + } + + /// Extracts the next block in the queue. + #[expect(clippy::cast_possible_truncation)] + fn next(&mut self) -> Option { + if self.word == 0 { + self.queue[self.offset as usize] = 0; + self.offset += self.queue[self.offset as usize + 1..].iter().position(|&x| x != 0)? as u32 + 1; + self.word = self.queue[self.offset as usize]; + } + let bit = self.word.trailing_zeros() as usize; + self.word ^= 1 << bit; + Some(OrderedBlock::from_usize( + bit | (self.offset as usize * bit_slice::WORD_BITS), + )) + } + + /// Inserts a single block into the queue. + #[track_caller] + #[expect(clippy::cast_possible_truncation)] + pub fn insert(&mut self, block: OrderedBlock) { + debug_assert!(block.as_u32() < self.domain_size); + let word = block.as_u32() / bit_slice::WORD_BITS as u32; + let bit = 1 << (block.as_usize() % bit_slice::WORD_BITS); + + self.queue[self.offset as usize] = self.word; + self.queue[word as usize] |= bit; + self.offset = self.offset.min(word); + self.word |= self.queue[self.offset as usize]; + } + + /// Inserts a sorted sequence of blocks into the queue. 
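+    ///
+    /// The blocks are expected in ascending order (as the name implies); compared to repeated
+    /// `insert` calls, the cached word is written back and reloaded only once for the whole batch.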
+ #[track_caller] + #[expect(clippy::cast_possible_truncation)] + pub fn insert_sorted(&mut self, blocks: impl IntoIterator) { + let mut blocks = blocks.into_iter(); + let Some(block) = blocks.next() else { + return; + }; + debug_assert!(block.as_u32() < self.domain_size); + let word = block.as_u32() / bit_slice::WORD_BITS as u32; + let bit = 1 << (block.as_usize() % bit_slice::WORD_BITS); + + self.queue[self.offset as usize] = self.word; + self.offset = self.offset.min(word); + + self.queue[word as usize] |= bit; + for block in blocks { + debug_assert!(block.as_u32() < self.domain_size); + let idx = block.as_usize() / bit_slice::WORD_BITS; + let bit = 1 << (block.as_usize() % bit_slice::WORD_BITS); + self.queue[idx] |= bit; + } + + self.word = self.queue[self.offset as usize]; + } +} + +/// Extracts the body's edges and orders them via the block map. +pub fn get_body_edges<'arena>( + arena: &'arena DroplessArena, + body: &Body<'_>, + block_map: &BlockOrderMap<'_>, +) -> &'arena IndexSlice> { + let blocks = IndexSlice::::from_raw_mut( + arena.alloc_from_iter(iter::repeat_with(SliceSet::empty).take(body.basic_blocks.len())), + ); + for (block, block_data) in body.basic_blocks.iter_enumerated() { + blocks[block_map.to_ordered[block]] = match block_data.terminator().kind { + TerminatorKind::Drop { + target, + unwind: UnwindAction::Cleanup(cleanup), + .. + } + | TerminatorKind::Call { + target: Some(target), + unwind: UnwindAction::Cleanup(cleanup), + .. + } + | TerminatorKind::Assert { + target, + unwind: UnwindAction::Cleanup(cleanup), + .. + } + | TerminatorKind::Yield { + resume: target, + drop: Some(cleanup), + .. + } => SliceSet::from_sorted( + arena.alloc_from_iter(minmax(block_map.to_ordered[target], block_map.to_ordered[cleanup])), + ), + + TerminatorKind::Goto { target } + | TerminatorKind::Drop { target, .. } + | TerminatorKind::Assert { target, .. } + | TerminatorKind::Call { + target: Some(target), .. + } + | TerminatorKind::Call { + unwind: UnwindAction::Cleanup(target), + .. + } + | TerminatorKind::Yield { resume: target, .. } + | TerminatorKind::FalseEdge { + real_target: target, .. + } + | TerminatorKind::FalseUnwind { + real_target: target, .. + } => SliceSet::from_ref(arena.alloc(block_map.to_ordered[target])), + + TerminatorKind::SwitchInt { ref targets, .. } => SliceSet::from_unsorted_slice_dedup( + arena.alloc_from_iter(targets.all_targets().iter().map(|&target| block_map.to_ordered[target])), + ), + + TerminatorKind::InlineAsm { + ref targets, unwind, .. + } => { + let targets = targets.iter().map(|&target| block_map.to_ordered[target]); + SliceSet::from_unsorted_slice(if let UnwindAction::Cleanup(cleanup) = unwind { + arena.alloc_from_iter(targets.chain([block_map.to_ordered[cleanup]])) + } else { + arena.alloc_from_iter(targets) + }) + }, + + TerminatorKind::UnwindResume + | TerminatorKind::UnwindTerminate(_) + | TerminatorKind::Return + | TerminatorKind::Unreachable + | TerminatorKind::TailCall { .. } + | TerminatorKind::Call { .. } + | TerminatorKind::CoroutineDrop => SliceSet::empty(), + } + } + blocks +} + +pub trait Analysis { + /// The state type stored for each node in the graph. + type Domain; + + /// Creates a copy of a block's entry state before calling `apply_block_transform`. + fn clone_block_entry(&mut self, src: &Self::Domain, dst: &mut Self::Domain, block: OrderedBlock); + + /// Transfers the computed value from a previous block to the entry state of a successor block + /// and returns whether the successor block's state has changed. 
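+    ///
+    /// For a typical forward "may" analysis this is a join, e.g. a bitwise-or of `src` into
+    /// `dst` that reports whether `dst` gained any new bits.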
+ fn transfer_domain( + &mut self, + src: &Self::Domain, + dst: &mut Self::Domain, + src_block: OrderedBlock, + dst_block: OrderedBlock, + ) -> bool; + + /// Applies the transformation function of a block to it's entry state. + fn apply_block_transform(&mut self, state: &mut Self::Domain, block: OrderedBlock); +} + +/// Runs an analysis until it reaches a fix state. +/// +/// See the module documentation for details. +pub fn run_analysis( + queue: &mut WorkQueue, + edges: &IndexSlice>, + states: &mut IndexSlice, + tmp_state: &mut A::Domain, + analysis: &mut A, +) { + debug_assert_eq!(queue.domain_size as usize, edges.len()); + debug_assert_eq!(queue.domain_size as usize, states.len()); + + queue.fill(); + while let Some(block) = queue.next() { + analysis.clone_block_entry(&states[block], tmp_state, block); + analysis.apply_block_transform(tmp_state, block); + queue.insert_sorted( + edges[block] + .iter() + .copied() + .filter(|&dst_block| analysis.transfer_domain(tmp_state, &mut states[dst_block], block, dst_block)), + ); + } +} diff --git a/clippy_mir/src/childless_projection.rs b/clippy_mir/src/childless_projection.rs new file mode 100644 index 000000000000..35ccdb697cf1 --- /dev/null +++ b/clippy_mir/src/childless_projection.rs @@ -0,0 +1,238 @@ +use core::option; +use rustc_arena::DroplessArena; +use rustc_data_structures::fx::FxHashMap; +use rustc_index::{Idx as _, IndexSlice}; +use rustc_middle::mir::{Body, Local, Place, PlaceElem, ProjectionElem}; +use rustc_middle::ty::{Ty, TyCtxt, TyKind, TypingEnv}; +use rustc_span::def_id::DefId; + +pub use crate::projection::{ + EMPTY_PLACE_DATA, Idx, PlaceData, PlaceFilter, ResolvedPlace, Resolver, SINGLE_PLACE_DATA, +}; + +/// Type-based interner for `ProjectionData`. +struct TyProjectionInterner<'arena, 'tcx, F> { + tcx: TyCtxt<'tcx>, + typing_env: TypingEnv<'tcx>, + arena: &'arena DroplessArena, + ty_has_value: F, + vis_filter: DefId, + projection_data: FxHashMap, &'arena PlaceData<'arena>>, +} +impl<'arena, 'tcx, F> TyProjectionInterner<'arena, 'tcx, F> +where + F: FnMut(Ty<'tcx>) -> bool, +{ + fn new( + tcx: TyCtxt<'tcx>, + typing_env: TypingEnv<'tcx>, + arena: &'arena DroplessArena, + ty_has_value: F, + vis_filter: DefId, + ) -> Self { + Self { + tcx, + typing_env, + arena, + ty_has_value, + vis_filter, + projection_data: FxHashMap::default(), + } + } + + /// Creates a new `ProjectionData` for the given type. + fn alloc_for_ty(&mut self, ty: Ty<'tcx>) -> &'arena PlaceData<'arena> { + if (self.ty_has_value)(ty) { + SINGLE_PLACE_DATA + } else { + match *ty.kind() { + TyKind::Adt(def, args) if def.is_struct() => PlaceData::alloc_new( + self.arena, + false, + def.non_enum_variant().fields.iter().map(|f| { + if f.vis.is_accessible_from(self.vis_filter, self.tcx) { + let ty = f.ty(self.tcx, args); + self.intern( + self.tcx + .try_normalize_erasing_regions(self.typing_env, ty) + .unwrap_or(ty), + ) + } else { + EMPTY_PLACE_DATA + } + }), + ), + TyKind::Tuple(tys) => PlaceData::alloc_new(self.arena, false, tys.iter().map(|ty| self.intern(ty))), + _ => EMPTY_PLACE_DATA, + } + } + } + + /// Interns the `ProjectionData` for the given type. + fn intern(&mut self, ty: Ty<'tcx>) -> &'arena PlaceData<'arena> { + if let Some(&data) = self.projection_data.get(&ty) { + data + } else { + let data = self.alloc_for_ty(ty); + self.projection_data.insert(ty, data); + data + } + } +} + +/// A resolved place according to a childless projection map. 
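+///
+/// `Value` carries the first tracked value index and the interned layout of the resolved node;
+/// `Child` and `Deref` only record the single parent value the access may affect.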
+#[derive(Clone, Copy)] +pub enum Resolved<'arena> { + Value { + start: Idx, + data: &'arena PlaceData<'arena>, + }, + Child { + parent: Idx, + }, + Deref { + parent: Idx, + }, +} +impl<'arena> ResolvedPlace<'arena> for Resolved<'arena> { + type Resolver = Map<'arena>; + type Parents = option::IntoIter; + + #[inline] + fn values(&self) -> (Idx, &'arena PlaceData<'arena>) { + if let Self::Value { start, data } = *self { + (start, data) + } else { + (Idx::ZERO, EMPTY_PLACE_DATA) + } + } + + #[inline] + fn is_deref(&self) -> bool { + matches!(self, Self::Deref { .. }) + } + + #[inline] + fn parents(&self, _: &Map<'arena>) -> Self::Parents { + if let Self::Deref { parent } | Self::Child { parent } = *self { + Some(parent).into_iter() + } else { + None.into_iter() + } + } + + #[inline] + fn affects_any_value(&self) -> bool { + if let Self::Value { data, .. } = *self { + data.contains_values() + } else { + true + } + } + + #[inline] + fn as_scalar_value(self) -> Option { + if let Self::Value { data, start } = self + && data.contains_values() + { + debug_assert_eq!(data.value_count, 1); + debug_assert!(data.has_value); + Some(start) + } else { + None + } + } +} + +/// Mapping between local projections and the range of values they occupy. +/// +/// Like `Map`, but each place containing a value will not have any child nodes. +pub struct Map<'arena> { + local_map: &'arena IndexSlice)>, + domain_size: u32, +} +impl<'arena> Map<'arena> { + pub fn new<'tcx>( + tcx: TyCtxt<'tcx>, + typing_env: TypingEnv<'tcx>, + arena: &'arena DroplessArena, + body: &Body<'tcx>, + ty_has_value: impl FnMut(Ty<'tcx>) -> bool, + vis_filter: DefId, + place_filter: &PlaceFilter<'_>, + ) -> Self { + let mut interner = TyProjectionInterner::new(tcx, typing_env, arena, ty_has_value, vis_filter); + let mut idx_count: u32 = 0u32; + let mut place_filter = place_filter.iter(); + Self { + local_map: IndexSlice::from_raw(arena.alloc_from_iter(body.local_decls.iter_enumerated().map( + |(local, local_decl)| { + let data = interner.intern( + tcx.try_normalize_erasing_regions(typing_env, local_decl.ty) + .unwrap_or(local_decl.ty), + ); + let data = if place_filter.local.is_some_and(|filter| filter == local) { + place_filter.apply_current(arena, data.fields, 0) + } else { + data + }; + let idx = idx_count; + idx_count += data.value_count; + (Idx::from_u32(idx), data) + }, + ))), + domain_size: idx_count, + } + } + + /// Gets the number of values + #[must_use] + pub fn domain_size(&self) -> usize { + self.domain_size as usize + } + + #[must_use] + pub fn resolve_slice_proj(&self, local: Local, projection: &[PlaceElem<'_>]) -> Resolved<'arena> { + let (mut idx, mut data) = self.local_map[local]; + let mut projections = projection.iter(); + while !data.has_value { + if let Some(projection) = projections.next() + && let &ProjectionElem::Field(field, _) = projection + { + // Note: if all fields contain no value then no field data will be stored. 
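+                // A field with no entry therefore has nothing tracked beneath it; the lookup
+                // falls back to `EMPTY_PLACE_DATA` below.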
+ if let Some(field) = data.fields.get(field) { + data = field.data; + idx = idx.plus(field.offset as usize); + continue; + } + data = EMPTY_PLACE_DATA; + } + break; + } + if data.has_value { + if projections + .clone() + .any(|projection| matches!(projection, ProjectionElem::Deref)) + { + return Resolved::Deref { parent: idx }; + } else if projections + .next() + .is_some_and(|projection| matches!(projection, ProjectionElem::Field(..))) + { + return Resolved::Child { parent: idx }; + } + } + Resolved::Value { data, start: idx } + } +} +impl<'arena> Resolver<'arena> for Map<'arena> { + type Resolved = Resolved<'arena>; + + fn resolve_local(&self, local: Local) -> (Idx, &'arena PlaceData<'arena>) { + self.local_map[local] + } + + fn resolve(&self, place: Place<'_>) -> Self::Resolved { + self.resolve_slice_proj(place.local, place.projection) + } +} diff --git a/clippy_mir/src/lib.rs b/clippy_mir/src/lib.rs new file mode 100644 index 000000000000..e4706aed3b13 --- /dev/null +++ b/clippy_mir/src/lib.rs @@ -0,0 +1,18 @@ +#![feature(anonymous_lifetime_in_impl_trait)] +#![feature(cmp_minmax)] +#![feature(if_let_guard)] +#![feature(let_chains)] +#![feature(rustc_private)] + +extern crate rustc_abi; +extern crate rustc_arena; +extern crate rustc_data_structures; +extern crate rustc_index; +extern crate rustc_middle; +extern crate rustc_mir_dataflow; +extern crate rustc_span; + +pub mod analysis; +pub mod childless_projection; +pub mod projection; +pub mod value_tracking; diff --git a/clippy_mir/src/projection.rs b/clippy_mir/src/projection.rs new file mode 100644 index 000000000000..dcc1c82dc392 --- /dev/null +++ b/clippy_mir/src/projection.rs @@ -0,0 +1,675 @@ +//! Create mappings that can resolve local places to a set of tracked values. +//! +//! Starting with each local as a tree where the local is the root node, each field is a +//! child node, and sub-fields are children of their respective nodes; a projection mapping +//! will map each node to a unique index. Once constructed this mapping can be used to +//! resolve a place to it's matching value and that values child and parent values. +//! +//! The constructed map may have multiple filters which prevent nodes from being given an +//! associated index: +//! +//! * First is a visibility filter. Any field which can not be accessed from the current body will +//! not be assigned an index. This filter is not optional. +//! * Second is a type based filter. This will prevent certain types from being assigned an index, +//! but will still allow both parents and children to be given one. +//! * Third is a place based filter. This will prevent a specific place as well as both it's parents +//! and children from being assigned an index. +//! +//! # Example +//! +//! Given the following struct: +//! +//! ```rust +//! struct Foo { +//! x: u32, +//! y: (u32, i32), +//! } +//! ``` +//! +//! This will create the following tree (each node's index is in parenthesis): +//! +//! ```none +//! Foo (0) +//! / \ +//! x (1) y (2) +//! / \ +//! 0 (3) 1 (4) +//! ``` +//! +//! Places within the struct are resolved as follows: +//! +//! * Foo: +//! * parents: N/A +//! * values: 0, 1, 2, 3, 4 +//! * Foo.x: +//! * parents: 0 +//! * values: 1 +//! * Foo.y: +//! * parents: 0 +//! * values: 2, 3, 4 +//! * Foo.y.0: +//! * parents: 2, 0 +//! * values: 3 +//! * Foo.y.1: +//! * parents: 2, 0 +//! * values: 4 +//! +//! If tuples were filtered from storing a value the following tree would be constructed: +//! +//! ```none +//! Foo (0) +//! / \ +//! x (1) y +//! / \ +//! 
+//!        0 (2)  1 (3)
+//! ```
+//!
+//! Places would be resolved as follows:
+//!
+//! * Foo:
+//!   * parents: N/A
+//!   * values: 0, 1, 2, 3
+//! * Foo.x:
+//!   * parents: 0
+//!   * values: 1
+//! * Foo.y:
+//!   * parents: 0
+//!   * values: 2, 3
+//! * Foo.y.0:
+//!   * parents: 0
+//!   * values: 2
+//! * Foo.y.1:
+//!   * parents: 0
+//!   * values: 3
+
+use clippy_data_structures::CountedIter;
+use core::ops::Range;
+use core::{ptr, slice};
+use rustc_abi::FieldIdx;
+use rustc_arena::DroplessArena;
+use rustc_data_structures::fx::FxHashMap;
+use rustc_index::{Idx as _, IndexSlice};
+use rustc_middle::mir::visit::Visitor;
+use rustc_middle::mir::{Body, Local, Location, Place, ProjectionElem, Rvalue};
+use rustc_middle::ty::{Ty, TyCtxt, TyKind, TypingEnv};
+use rustc_span::def_id::DefId;
+
+rustc_index::newtype_index! {
+ /// Index of a tracked value.
+ #[orderable]
+ pub struct Idx {}
+}
+
+#[derive(Clone, Copy)]
+pub struct FieldData<'arena> {
+ /// The offset to use to get to the first value stored for this field.
+ pub offset: u32,
+ /// The projection data for this field.
+ pub data: &'arena PlaceData<'arena>,
+}
+impl FieldData<'_> {
+ /// A field with no values.
+ pub const EMPTY: Self = Self {
+ // The offset doesn't actually matter since the occupied range is empty.
+ offset: 0,
+ data: EMPTY_PLACE_DATA,
+ };
+}
+
+/// Traversal data about a node in the projection tree.
+#[non_exhaustive]
+pub struct PlaceData<'arena> {
+ /// The offset and projection data for each immediate child.
+ pub fields: &'arena IndexSlice<FieldIdx, FieldData<'arena>>,
+ /// The number of values stored by this type, including all children.
+ pub value_count: u32,
+ /// Whether a value is stored for this type itself.
+ pub has_value: bool,
+}
+
+// Avoid the need to allocate the two most common values.
+pub static EMPTY_PLACE_DATA: &PlaceData<'_> = &PlaceData {
+ fields: IndexSlice::from_raw(&[]),
+ value_count: 0,
+ has_value: false,
+};
+pub static SINGLE_PLACE_DATA: &PlaceData<'_> = &PlaceData {
+ fields: IndexSlice::from_raw(&[]),
+ value_count: 1,
+ has_value: true,
+};
+
+impl PartialEq for PlaceData<'_> {
+ #[inline]
+ fn eq(&self, other: &Self) -> bool {
+ // Most instances will be interned so use pointer equality here.
+ ptr::addr_eq(self, other)
+ }
+}
+
+impl<'arena> PlaceData<'arena> {
+ #[inline]
+ #[must_use]
+ pub fn contains_values(&self) -> bool {
+ // No need to dereference. All empty instances are replaced with `EMPTY_PLACE_DATA`.
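+ // `PartialEq` for `PlaceData` compares addresses, so this is just a pointer comparison.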
+ self != EMPTY_PLACE_DATA + } + + pub fn alloc_new( + arena: &'arena DroplessArena, + has_value: bool, + fields: impl Iterator, + ) -> &'arena Self { + let mut value_count = u32::from(has_value); + let fields = arena.alloc_from_iter(fields.map(|data| { + let offset = value_count; + value_count += data.value_count; + FieldData { offset, data } + })); + if value_count == u32::from(has_value) { + if has_value { SINGLE_PLACE_DATA } else { EMPTY_PLACE_DATA } + } else { + arena.alloc(Self { + fields: IndexSlice::from_raw(fields), + value_count, + has_value, + }) + } + } +} + +/// Type-based interner for `ProjectionData` +struct TyProjectionInterner<'arena, 'tcx, F> { + tcx: TyCtxt<'tcx>, + typing_env: TypingEnv<'tcx>, + arena: &'arena DroplessArena, + ty_has_value: F, + vis_filter: DefId, + projection_data: FxHashMap, &'arena PlaceData<'arena>>, +} +impl<'arena, 'tcx, F> TyProjectionInterner<'arena, 'tcx, F> +where + F: FnMut(Ty<'tcx>) -> bool, +{ + fn new( + tcx: TyCtxt<'tcx>, + typing_env: TypingEnv<'tcx>, + arena: &'arena DroplessArena, + ty_has_value: F, + vis_filter: DefId, + ) -> Self { + Self { + tcx, + typing_env, + arena, + ty_has_value, + vis_filter, + projection_data: FxHashMap::default(), + } + } + + /// Creates a new `ProjectionData` for the given type. + fn alloc_for_ty(&mut self, ty: Ty<'tcx>) -> &'arena PlaceData<'arena> { + let has_value = (self.ty_has_value)(ty); + match *ty.kind() { + TyKind::Adt(def, args) if def.is_struct() => PlaceData::alloc_new( + self.arena, + has_value, + def.non_enum_variant().fields.iter().map(|f| { + if f.vis.is_accessible_from(self.vis_filter, self.tcx) { + let ty = f.ty(self.tcx, args); + self.intern( + self.tcx + .try_normalize_erasing_regions(self.typing_env, ty) + .unwrap_or(ty), + ) + } else { + EMPTY_PLACE_DATA + } + }), + ), + TyKind::Tuple(tys) => PlaceData::alloc_new(self.arena, has_value, tys.iter().map(|ty| self.intern(ty))), + _ if has_value => SINGLE_PLACE_DATA, + _ => EMPTY_PLACE_DATA, + } + } + + /// Interns the `ProjectionData` for the given type. + fn intern(&mut self, ty: Ty<'tcx>) -> &'arena PlaceData<'arena> { + if let Some(&data) = self.projection_data.get(&ty) { + data + } else { + let data = self.alloc_for_ty(ty); + self.projection_data.insert(ty, data); + data + } + } +} + +pub(crate) struct PlaceFilterIter<'a> { + iter: slice::Iter<'a, LocalPlace<'a>>, + pub local: Option, + pub projection: &'a [FieldIdx], +} +impl PlaceFilterIter<'_> { + /// Creates a new `ProjectionData` by applying the current filter. + /// + /// This will move to the next filter not affecting the current field. + pub(crate) fn apply_current<'arena>( + &mut self, + arena: &'arena DroplessArena, + fields: &'arena IndexSlice>, + depth: usize, + ) -> &'arena PlaceData<'arena> { + if let Some(&filter_field) = self.projection.get(depth) { + let filter_local = self.local; + let filter_projection = &self.projection[..depth]; + let mut filter_field: Option = Some(filter_field); + let data = PlaceData::alloc_new( + arena, + false, + fields.iter_enumerated().map(|(field, field_data)| { + if filter_field == Some(field) { + let fields = field_data.data.fields; + let data = self.apply_current(arena, fields, depth + 1); + // Get the next field to filter if the filter still has the same parent field. + filter_field = + self.projection.get(depth).copied().filter(|_| { + self.local == filter_local && self.projection.starts_with(filter_projection) + }); + data + } else { + field_data.data + } + }), + ); + // Skip to the filter after the current field. 
+ // Note: Child fields may have been dropped before applying this filter. + while filter_field.is_some() { + (self.local, self.projection) = self + .iter + .next() + .map_or((None, [].as_slice()), |x| (Some(x.local), x.projection)); + filter_field = self + .projection + .get(depth) + .copied() + .filter(|_| self.local == filter_local && self.projection.starts_with(filter_projection)); + } + data + } else { + // Found the filtered field. Step to the next filter. + (self.local, self.projection) = self + .iter + .next() + .map_or((None, [].as_slice()), |x| (Some(x.local), x.projection)); + EMPTY_PLACE_DATA + } + } +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)] +struct LocalPlace<'arena> { + local: Local, + projection: &'arena [FieldIdx], +} +impl<'arena> LocalPlace<'arena> { + fn from_place(arena: &'arena DroplessArena, place: Place<'_>) -> Self { + Self { + local: place.local, + projection: arena.alloc_from_iter(CountedIter(place.projection.iter().map_while(|proj| { + if let ProjectionElem::Field(idx, _) = proj { + Some(idx) + } else { + None + } + }))), + } + } + + fn is_parent_of(self, other: LocalPlace) -> bool { + self.local == other.local + && self.projection.len() <= other.projection.len() + && self.projection.iter().zip(other.projection).all(|(&x, &y)| x == y) + } +} + +pub struct PlaceFilter<'a> { + filter: Vec>, +} +impl<'a> PlaceFilter<'a> { + /// Creates a filter which will remove all places that have a raw borrow taken. + pub fn new_raw_borrow_filter(arena: &'a DroplessArena, body: &Body<'_>) -> Self { + struct V<'a> { + arena: &'a DroplessArena, + borrows: Vec>, + } + impl<'tcx> Visitor<'tcx> for V<'_> { + fn visit_rvalue(&mut self, rvalue: &Rvalue<'tcx>, _: Location) { + if let Rvalue::RawPtr(_, place) = *rvalue { + self.borrows.push(LocalPlace::from_place(self.arena, place)); + } + } + } + let mut v = V { + arena, + borrows: Vec::new(), + }; + for (block, block_data) in body.basic_blocks.iter_enumerated() { + v.visit_basic_block_data(block, block_data); + } + v.borrows.sort(); + // Remove sub-field filters when the parent field is also filtered. + // Not doing so will break the filtering algorithm. + v.borrows.dedup_by(|&mut second, &mut first| first.is_parent_of(second)); + Self { filter: v.borrows } + } + + #[expect(clippy::iter_not_returning_iterator)] + pub(crate) fn iter(&self) -> PlaceFilterIter<'_> { + let mut iter = self.filter.iter(); + let (local, projection) = iter + .next() + .map_or((None, [].as_slice()), |x| (Some(x.local), x.projection)); + PlaceFilterIter { + iter, + local, + projection, + } + } +} + +#[derive(Clone)] +struct ResolvedParentsField<'a> { + fields: slice::Iter<'a, FieldData<'a>>, + /// The base index to use for all fields. + idx: Idx, + /// The parent index to use for all fields. 
+ parent: Option<Idx>,
+}
+struct ResolvedParents<'a> {
+ locals: slice::Iter<'a, (Idx, &'a PlaceData<'a>)>,
+ parents: Vec<ResolvedParentsField<'a>>,
+ current: ResolvedParentsField<'a>,
+ hint: u32,
+}
+impl<'a> ResolvedParents<'a> {
+ fn new(locals: &'a IndexSlice<Local, (Idx, &'a PlaceData<'a>)>, hint: u32) -> Self {
+ Self {
+ locals: locals.iter(),
+ parents: Vec::new(),
+ current: ResolvedParentsField {
+ fields: [].iter(),
+ idx: Idx::ZERO,
+ parent: None,
+ },
+ hint,
+ }
+ }
+}
+impl Iterator for ResolvedParents<'_> {
+ type Item = Option<Idx>;
+ fn next(&mut self) -> Option<Self::Item> {
+ loop {
+ if let Some(field) = self.current.fields.next() {
+ self.parents.push(self.current.clone());
+ let parent = self.current.parent;
+ self.current = ResolvedParentsField {
+ fields: field.data.fields.iter(),
+ idx: self.current.idx.plus(field.offset as usize),
+ parent: None,
+ };
+ if field.data.has_value {
+ self.current.parent = Some(self.current.idx);
+ return Some(parent);
+ }
+ } else if let Some(field) = self.parents.pop() {
+ self.current = field;
+ } else {
+ let &(idx, projection) = self.locals.by_ref().find(|&(_, data)| data.contains_values())?;
+ self.current = ResolvedParentsField {
+ fields: projection.fields.iter(),
+ idx,
+ parent: self.parents.last().and_then(|x| x.parent),
+ };
+ if projection.has_value {
+ self.current.parent = Some(self.current.idx);
+ return Some(None);
+ }
+ }
+ }
+ }
+
+ /// Pass the exact size to `DroplessArena::alloc_from_iter`.
+ fn size_hint(&self) -> (usize, Option<usize>) {
+ (self.hint as usize, Some(self.hint as usize))
+ }
+}
+
+/// A place which has been resolved by a projection map.
+pub trait ResolvedPlace<'arena>: Copy {
+ type Resolver;
+ type Parents: Iterator<Item = Idx>;
+
+ /// Gets the first value index and the projection data for the place.
+ fn values(&self) -> (Idx, &'arena PlaceData<'arena>);
+
+ /// Whether the place involves a deref projection.
+ fn is_deref(&self) -> bool;
+
+ /// The parents of the place from most to least specific.
+ fn parents(&self, map: &Self::Resolver) -> Self::Parents;
+
+ /// Checks if this place affects any values.
+ fn affects_any_value(&self) -> bool;
+
+ /// Gets the contained value assuming the place refers to a scalar value.
+ ///
+ /// # Panics
+ /// This may panic if this place contains multiple values.
+ fn as_scalar_value(self) -> Option<Idx>;
+}
+
+pub trait Resolver<'arena> {
+ type Resolved: ResolvedPlace<'arena, Resolver = Self>;
+
+ /// Resolves the place to the set of values it contains.
+ fn resolve(&self, place: Place<'_>) -> Self::Resolved;
+ /// Resolves the local to the set of values it contains.
+ fn resolve_local(&self, local: Local) -> (Idx, &'arena PlaceData<'arena>);
+
+ /// Gets the set of values contained in the body's arguments.
+ fn resolve_args(&self, body: &Body<'_>) -> Range<Idx> {
+ if body.arg_count > 0 {
+ let (args_start, _) = self.resolve_local(Local::from_u32(1));
+ let (args_end, args_data) = self.resolve_local(Local::from_usize(body.arg_count));
+ args_start..args_end.plus(args_data.value_count as usize)
+ } else {
+ Idx::ZERO..Idx::ZERO
+ }
+ }
+}
+
+#[derive(Clone)]
+pub struct ParentIter<'a> {
+ parent_map: &'a IndexSlice<Idx, Option<Idx>>,
+ next: Option<Idx>,
+}
+impl Iterator for ParentIter<'_> {
+ type Item = Idx;
+ fn next(&mut self) -> Option<Self::Item> {
+ match self.next {
+ Some(x) => {
+ self.next = self.parent_map[x];
+ Some(x)
+ },
+ None => None,
+ }
+ }
+}
+
+/// A place which has been resolved by a projection map.
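+///
+/// A rough usage sketch (the `track` and `invalidate` callbacks here are hypothetical and not
+/// part of this crate):
+///
+/// ```ignore
+/// // `map` is a `Map` built by `Map::new`.
+/// match map.resolve(place) {
+///     // The place maps onto a contiguous range of tracked values.
+///     Resolved::Value { idx, data, .. } if data.contains_values() => {
+///         for value in idx.index()..idx.index() + data.value_count as usize {
+///             track(value);
+///         }
+///     },
+///     // The place goes through a pointer; only the parent value is known.
+///     Resolved::Deref { parent } => invalidate(parent),
+///     _ => {},
+/// }
+/// ```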
+#[derive(Clone, Copy)]
+pub enum Resolved<'arena> {
+ Value {
+ data: &'arena PlaceData<'arena>,
+ parent: Option<Idx>,
+ idx: Idx,
+ },
+ Deref {
+ parent: Idx,
+ },
+}
+impl<'arena> ResolvedPlace<'arena> for Resolved<'arena> {
+ type Resolver = Map<'arena>;
+ type Parents = ParentIter<'arena>;
+
+ #[inline]
+ fn values(&self) -> (Idx, &'arena PlaceData<'arena>) {
+ if let Self::Value { data, idx, .. } = *self {
+ (idx, data)
+ } else {
+ (Idx::ZERO, EMPTY_PLACE_DATA)
+ }
+ }
+
+ #[inline]
+ fn is_deref(&self) -> bool {
+ matches!(self, Self::Deref { .. })
+ }
+
+ #[inline]
+ fn parents(&self, map: &Map<'arena>) -> Self::Parents {
+ ParentIter {
+ parent_map: map.parent_map,
+ next: match *self {
+ Self::Value { parent, .. } => parent,
+ Self::Deref { parent } => Some(parent),
+ },
+ }
+ }
+
+ #[inline]
+ fn affects_any_value(&self) -> bool {
+ if let Self::Value { data, parent, .. } = *self {
+ data.contains_values() || parent.is_some()
+ } else {
+ true
+ }
+ }
+
+ #[inline]
+ fn as_scalar_value(self) -> Option<Idx> {
+ match self {
+ Self::Value { data, idx, .. } => {
+ debug_assert_eq!(data.value_count, u32::from(data.has_value));
+ data.has_value.then_some(idx)
+ },
+ Self::Deref { .. } => None,
+ }
+ }
+}
+
+/// Mapping between local projections and the range of values they occupy.
+pub struct Map<'arena> {
+ local_map: &'arena IndexSlice<Local, (Idx, &'arena PlaceData<'arena>)>,
+ parent_map: &'arena IndexSlice<Idx, Option<Idx>>,
+}
+impl<'arena> Map<'arena> {
+ pub fn new<'tcx>(
+ tcx: TyCtxt<'tcx>,
+ typing_env: TypingEnv<'tcx>,
+ arena: &'arena DroplessArena,
+ body: &Body<'tcx>,
+ ty_has_value: impl FnMut(Ty<'tcx>) -> bool,
+ vis_filter: DefId,
+ place_filter: &PlaceFilter<'_>,
+ ) -> Self {
+ let mut interner = TyProjectionInterner::new(tcx, typing_env, arena, ty_has_value, vis_filter);
+ let mut idx_count: u32 = 0u32;
+ let mut place_filter = place_filter.iter();
+ let local_map = IndexSlice::<Local, _>::from_raw(arena.alloc_from_iter(
+ body.local_decls.iter_enumerated().map(|(local, local_decl)| {
+ let data = interner.intern(
+ tcx.try_normalize_erasing_regions(typing_env, local_decl.ty)
+ .unwrap_or(local_decl.ty),
+ );
+ let data = if place_filter.local.is_some_and(|filter| filter == local) {
+ place_filter.apply_current(arena, data.fields, 0)
+ } else {
+ data
+ };
+ let idx = idx_count;
+ idx_count += data.value_count;
+ (Idx::from_u32(idx), data)
+ }),
+ ));
+ let parent_map =
+ IndexSlice::<Idx, _>::from_raw(arena.alloc_from_iter(ResolvedParents::new(local_map, idx_count)));
+ Self { local_map, parent_map }
+ }
+
+ /// Gets the number of tracked values.
+ #[must_use]
+ pub fn domain_size(&self) -> usize {
+ self.parent_map.len()
+ }
+
+ #[must_use]
+ #[expect(clippy::cast_possible_truncation)]
+ pub fn domain_size_u32(&self) -> u32 {
+ self.parent_map.len() as u32
+ }
+
+ #[must_use]
+ pub fn local_for_idx(&self, idx: Idx) -> Local {
+ let mut res = Local::ZERO;
+ for (l, &(x, data)) in self.local_map.iter_enumerated() {
+ if data.has_value {
+ if x <= idx {
+ res = l;
+ } else {
+ break;
+ }
+ }
+ }
+ res
+ }
+}
+impl<'arena> Resolver<'arena> for Map<'arena> {
+ type Resolved = Resolved<'arena>;
+
+ fn resolve(&self, place: Place<'_>) -> Self::Resolved {
+ let (mut idx, mut data) = self.local_map[place.local];
+ let mut parent = None;
+ let mut projections = place.projection.iter();
+ while let Some(projection) = projections.next() {
+ if data.has_value {
+ parent = Some(idx);
+ }
+ if let ProjectionElem::Field(field, _) = projection {
+ // Note: if all fields contain no value then no field data will be stored.
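+ // A missing entry means the field (and everything below it) is untracked; fall back to `EMPTY_PLACE_DATA`.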
+ if let Some(field) = data.fields.get(field) { + data = field.data; + idx = idx.plus(field.offset as usize); + continue; + } + data = EMPTY_PLACE_DATA; + } + if let Some(parent) = parent + && (matches!(projection, ProjectionElem::Deref) + || projections.any(|projection| matches!(projection, ProjectionElem::Deref))) + { + return Resolved::Deref { parent }; + } + // At this point we either have a deref of an untracked value, or a projection + // that stays within the local. + break; + } + Resolved::Value { data, parent, idx } + } + + fn resolve_local(&self, local: Local) -> (Idx, &'arena PlaceData<'arena>) { + self.local_map[local] + } +} diff --git a/clippy_mir/src/value_tracking.rs b/clippy_mir/src/value_tracking.rs new file mode 100644 index 000000000000..9fb1d47745e2 --- /dev/null +++ b/clippy_mir/src/value_tracking.rs @@ -0,0 +1,668 @@ +use crate::projection::{self, PlaceData, ResolvedPlace as _, Resolver}; +use core::ops::Range; +use rustc_abi::{FieldIdx, VariantIdx}; +use rustc_index::{Idx, IndexSlice}; +use rustc_middle::mir::{ + AggregateKind, BasicBlockData, BinOp, Body, BorrowKind, CastKind, ConstOperand, CopyNonOverlapping, + InlineAsmOperand, Local, NonDivergingIntrinsic, NullOp, Operand, Place, RETURN_PLACE, RawPtrKind, Rvalue, + Statement, StatementKind, Terminator, TerminatorKind, UnOp, +}; +use rustc_middle::ty::{self, Ty, TyCtxt}; +use rustc_span::source_map::Spanned; +use rustc_span::{Span, sym}; + +/// Visitor for tracking the movement of values within a MIR body. +pub trait Visitor<'arena, 'tcx>: Sized { + type Resolver: Resolver<'arena>; + fn resolver(&self) -> &Self::Resolver; + + /// Gets the `TyCtxt` this visitor instance is associated with. + fn tcx(&self) -> TyCtxt<'tcx>; + + /// Gets the MIR body this visitor instance is associated with. + fn body(&self) -> &Body<'tcx>; + + /// Visits a read of an individual value. + fn visit_read_idx(&mut self, idx: projection::Idx, sp: Span); + + /// Visits a mutation of an individual value. + fn visit_mutate_idx(&mut self, idx: projection::Idx, sp: Span); + + /// Visits a write of `uninit` bytes to an individual value. + fn visit_uninit_idx(&mut self, idx: projection::Idx, sp: Span); + + /// Visits a copy of one value to another. + fn visit_copy_idx(&mut self, dst: projection::Idx, src: projection::Idx, sp: Span); + + /// Visits a move of one value to another. + fn visit_move_idx(&mut self, dst: projection::Idx, src: projection::Idx, sp: Span); + + /// Visits a move of an individual value to an unknown place. + /// + /// Default to calling `visit_read_idx` followed by `visit_uninit_idx`. + #[inline] + fn visit_consume_idx(&mut self, idx: projection::Idx, sp: Span) { + self.visit_read_idx(idx, sp); + self.visit_uninit_idx(idx, sp); + } + + /// Visits a read of a set of values. + #[inline] + fn visit_read_range(&mut self, range: Range, sp: Span) { + for i in range { + self.visit_read_idx(i, sp); + } + } + + /// Visits a mutation of a set of values. + #[inline] + fn visit_mutate_range(&mut self, range: Range, sp: Span) { + for i in range { + self.visit_mutate_idx(i, sp); + } + } + + /// Visits a write of `uninit` bytes to a set of values. + #[inline] + fn visit_uninit_range(&mut self, range: Range, sp: Span) { + for i in range { + self.visit_uninit_idx(i, sp); + } + } + + /// Visits a copy from one set of values to another. 
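+ ///
+ /// Defaults to calling `visit_copy_idx` for each destination/source index pair.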
+ #[inline] + fn visit_copy_range(&mut self, dst: projection::Idx, src: Range, sp: Span) { + for (dst, src) in (dst..).zip(src) { + self.visit_copy_idx(dst, src, sp); + } + } + + /// Visits a move from one set of values to another. + #[inline] + fn visit_move_range(&mut self, dst: projection::Idx, src: Range, sp: Span) { + for (dst, src) in (dst..).zip(src) { + self.visit_move_idx(dst, src, sp); + } + } + + /// Visits a move of a set of values to an unknown place. + #[inline] + fn visit_consume_range(&mut self, range: Range, sp: Span) { + for i in range { + self.visit_consume_idx(i, sp); + } + } + + /// Visits the parent of a read field. + #[inline] + fn visit_read_parent(&mut self, idx: projection::Idx, sp: Span) { + self.visit_read_idx(idx, sp); + } + + /// Visits the parent of a mutated field. + #[inline] + fn visit_mutate_parent(&mut self, idx: projection::Idx, sp: Span) { + self.visit_mutate_idx(idx, sp); + } + + /// Visits the parent of a consumed field. + /// + /// Defaults to calling `visit_read_parent` followed by `visit_mutate_parent`. + #[inline] + fn visit_consume_parent(&mut self, idx: projection::Idx, sp: Span) { + self.visit_read_parent(idx, sp); + self.visit_mutate_parent(idx, sp); + } + + /// Visits a read of a resolved place. + /// + /// Defaults to calling `visit_read_range` for the contained values and `visit_read_idx` for + /// each parent. + fn visit_read_place(&mut self, place: Place<'tcx>, sp: Span) { + let place = self.resolver().resolve(place); + let (start, data) = place.values(); + if data.contains_values() { + self.visit_read_range(start..start.plus(data.value_count as usize), sp); + } + for idx in place.parents(self.resolver()) { + self.visit_read_parent(idx, sp); + } + } + + /// Visits a mutation of a resolved place. + /// + /// Defaults to calling `visit_mutate_range` for the contained values and `visit_mutate_idx` for + /// each parent. + fn visit_mutate_place(&mut self, place: Place<'tcx>, sp: Span) { + let place = self.resolver().resolve(place); + let (start, data) = place.values(); + if data.contains_values() { + self.visit_mutate_range(start..start.plus(data.value_count as usize), sp); + } + for idx in place.parents(self.resolver()) { + self.visit_mutate_parent(idx, sp); + } + } + + /// Visits a write of `uninit` bytes to a resolved place. + /// + /// Defaults to calling `visit_uninit_range` for the contained values and `visit_mutate_idx` for + /// each parent. + fn visit_uninit_place(&mut self, place: Place<'tcx>, sp: Span) { + let place = self.resolver().resolve(place); + let (start, data) = place.values(); + if data.contains_values() { + self.visit_uninit_range(start..start.plus(data.value_count as usize), sp); + } + for idx in place.parents(self.resolver()) { + self.visit_mutate_parent(idx, sp); + } + } + + /// Visits a copy from one resolved place to another. + fn visit_copy_place(&mut self, dst: Place<'tcx>, src: Place<'tcx>, sp: Span) { + let dst = self.resolver().resolve(dst); + let src = self.resolver().resolve(src); + let (dst_start, dst_data) = dst.values(); + let (src_start, src_data) = src.values(); + if dst_data.contains_values() || src_data.contains_values() { + copy_place(self, Copy, dst_start, dst_data, src_start, src_data, sp); + } + for idx in src.parents(self.resolver()) { + self.visit_read_parent(idx, sp); + } + for idx in dst.parents(self.resolver()) { + self.visit_mutate_parent(idx, sp); + } + } + + /// Visits a move from one resolved place to another. 
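+ ///
+ /// Defaults to moving the contained values via `copy_place` with the `Move` copy visitor, then
+ /// calling `visit_consume_parent` for each parent of the source and `visit_mutate_parent` for
+ /// each parent of the destination.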
+ fn visit_move_place(&mut self, dst: Place<'tcx>, src: Place<'tcx>, sp: Span) { + let dst = self.resolver().resolve(dst); + let src = self.resolver().resolve(src); + let (dst_start, dst_data) = dst.values(); + let (src_start, src_data) = src.values(); + if dst_data.contains_values() || src_data.contains_values() { + copy_place(self, Move, dst_start, dst_data, src_start, src_data, sp); + } + for idx in src.parents(self.resolver()) { + self.visit_consume_parent(idx, sp); + } + for idx in dst.parents(self.resolver()) { + self.visit_mutate_parent(idx, sp); + } + } + + /// Visits a move from a resolved place to an unknown location. + /// + /// Defaults to calling `visit_consume_range` followed by `visit_read_idx` and + /// `visit_mutate_idx` for each parent. + fn visit_consume_place(&mut self, place: Place<'tcx>, sp: Span) { + let place = self.resolver().resolve(place); + let (start, data) = place.values(); + if data.contains_values() { + self.visit_consume_range(start..start.plus(data.value_count as usize), sp); + } + for idx in place.parents(self.resolver()) { + self.visit_consume_parent(idx, sp); + } + } + + /// Visits a drop of a resolved place. + /// + /// Defaults to calling `visit_uninit_place`. + #[inline] + fn visit_drop_place(&mut self, place: Place<'tcx>, sp: Span) { + self.visit_uninit_place(place, sp); + } + + #[inline] + fn visit_uninit_local(&mut self, local: Local, sp: Span) { + let (start, data) = self.resolver().resolve_local(local); + if data.contains_values() { + self.visit_uninit_range(start..start.plus(data.value_count as usize), sp); + } + } + + #[inline] + fn visit_consume_local(&mut self, local: Local, sp: Span) { + let (start, data) = self.resolver().resolve_local(local); + if data.contains_values() { + self.visit_consume_range(start..start.plus(data.value_count as usize), sp); + } + } + + #[inline] + fn visit_assign_constant_field( + &mut self, + dst_start: projection::Idx, + dst_data: &PlaceData<'_>, + _src: &ConstOperand<'tcx>, + sp: Span, + ) { + if dst_data.contains_values() { + self.visit_mutate_range(dst_start..dst_start.plus(dst_data.value_count as usize), sp); + } + } + + fn visit_assign_aggregate( + &mut self, + dst: Place<'tcx>, + _kind: &AggregateKind<'tcx>, + ops: &IndexSlice>, + sp: Span, + ) { + let dst = self.resolver().resolve(dst); + let (dst_start, dst_data) = dst.values(); + if dst_data.contains_values() && dst_data.value_count > u32::from(dst_data.has_value) { + debug_assert_eq!(dst_data.fields.len(), ops.len()); + for (dst_field, op) in dst_data.fields.iter().zip(ops) { + if dst_field.data.contains_values() { + let dst = dst_start.plus(dst_field.offset as usize); + match op { + &Operand::Copy(src) => { + copy_aggregate_field(self, Copy, dst, dst_field.data, self.resolver().resolve(src), sp); + }, + &Operand::Move(src) => { + copy_aggregate_field(self, Move, dst, dst_field.data, self.resolver().resolve(src), sp); + }, + Operand::Constant(src) => self.visit_assign_constant_field(dst, dst_field.data, src, sp), + } + } + } + } else { + for op in ops { + walk_operand(self, op, sp); + } + } + for idx in dst.parents(self.resolver()) { + self.visit_mutate_idx(idx, sp); + } + } + + #[inline] + fn visit_assign_unary_op(&mut self, dst: Place<'tcx>, _op: UnOp, src: &Operand<'tcx>, sp: Span) { + walk_operand(self, src, sp); + self.visit_mutate_place(dst, sp); + } + + #[inline] + fn visit_assign_binary_op( + &mut self, + dst: Place<'tcx>, + _op: BinOp, + (lhs, rhs): &(Operand<'tcx>, Operand<'tcx>), + sp: Span, + ) { + walk_operand(self, lhs, sp); + 
walk_operand(self, rhs, sp); + self.visit_mutate_place(dst, sp); + } + + #[inline] + fn visit_assign_cast(&mut self, dst: Place<'tcx>, _kind: CastKind, src: &Operand<'tcx>, _ty: Ty<'tcx>, sp: Span) { + walk_operand(self, src, sp); + self.visit_mutate_place(dst, sp); + } + + #[inline] + fn visit_assign_len(&mut self, dst: Place<'tcx>, src: Place<'tcx>, sp: Span) { + self.visit_read_place(src, sp); + self.visit_mutate_place(dst, sp); + } + + #[inline] + fn visit_assign_discriminant(&mut self, dst: Place<'tcx>, src: Place<'tcx>, sp: Span) { + self.visit_read_place(src, sp); + self.visit_mutate_place(dst, sp); + } + + #[inline] + fn visit_assign_null_op(&mut self, dst: Place<'tcx>, _op: &NullOp<'tcx>, _ty: Ty<'tcx>, sp: Span) { + self.visit_mutate_place(dst, sp); + } + + #[inline] + fn visit_assign_raw_ptr(&mut self, dst: Place<'tcx>, src: Place<'tcx>, _kind: RawPtrKind, sp: Span) { + // A raw borrow can invalidate any value tracking done unless special care is taken. + debug_assert!( + !self.resolver().resolve(src).affects_any_value(), + "A raw borrow of a tracked place was taken at `{sp:?}`. \ + Use `clippy_mir::projection::create_raw_borrow_filter` to filter out these places.", + ); + self.visit_mutate_place(dst, sp); + } + + fn visit_assign_borrow(&mut self, dst: Place<'tcx>, src: Place<'tcx>, kind: BorrowKind, sp: Span) { + let src = self.resolver().resolve(src); + let (src_start, src_data) = src.values(); + if src_data.contains_values() { + let src_range = src_start..src_start.plus(src_data.value_count as usize); + if matches!(kind, BorrowKind::Mut { .. }) { + self.visit_mutate_range(src_range.clone(), sp); + } + self.visit_read_range(src_range, sp); + } + if matches!(kind, BorrowKind::Mut { .. }) { + for idx in src.parents(self.resolver()) { + self.visit_mutate_parent(idx, sp); + } + } + for idx in src.parents(self.resolver()) { + self.visit_read_parent(idx, sp); + } + self.visit_mutate_place(dst, sp); + } + + #[inline] + fn visit_assign_shallow_box(&mut self, dst: Place<'tcx>, src: &Operand<'tcx>, _ty: Ty<'tcx>, sp: Span) { + walk_operand(self, src, sp); + self.visit_mutate_place(dst, sp); + } + + #[inline] + fn visit_assign_constant(&mut self, dst: Place<'tcx>, _src: &ConstOperand<'tcx>, sp: Span) { + self.visit_mutate_place(dst, sp); + } + + fn visit_assignment(&mut self, stmt: &(Place<'tcx>, Rvalue<'tcx>), sp: Span) { + let dst = stmt.0; + match &stmt.1 { + Rvalue::Aggregate(kind, ops) => self.visit_assign_aggregate(dst, kind, ops, sp), + &Rvalue::UnaryOp(op, ref src) => self.visit_assign_unary_op(dst, op, src, sp), + &Rvalue::Cast(kind, ref src, ty) => self.visit_assign_cast(dst, kind, src, ty, sp), + &Rvalue::Len(src) => self.visit_assign_len(dst, src, sp), + &Rvalue::BinaryOp(kind, ref ops) => self.visit_assign_binary_op(dst, kind, ops, sp), + &Rvalue::Discriminant(src) => self.visit_assign_discriminant(dst, src, sp), + &Rvalue::CopyForDeref(src) => self.visit_copy_place(dst, src, sp), + &Rvalue::NullaryOp(ref op, ty) => self.visit_assign_null_op(dst, op, ty, sp), + &Rvalue::RawPtr(kind, src) => self.visit_assign_raw_ptr(dst, src, kind, sp), + &Rvalue::Ref(_, kind, src) => self.visit_assign_borrow(dst, src, kind, sp), + Rvalue::Repeat(value, _) => { + walk_operand(self, value, sp); + self.visit_mutate_place(dst, sp); + }, + &Rvalue::ShallowInitBox(ref src, ty) => self.visit_assign_shallow_box(dst, src, ty, sp), + Rvalue::Use(src) => match src { + &Operand::Move(src) => self.visit_move_place(dst, src, sp), + &Operand::Copy(src) => self.visit_copy_place(dst, src, sp), + 
Operand::Constant(src) => self.visit_assign_constant(dst, src, sp), + }, + Rvalue::ThreadLocalRef(_) => self.visit_mutate_place(dst, sp), + Rvalue::WrapUnsafeBinder(op, _) => walk_operand(self, op, sp), + } + } + + fn visit_copy_nonoverlapping(&mut self, args: &CopyNonOverlapping<'tcx>, sp: Span) { + walk_operand(self, &args.src, sp); + walk_operand(self, &args.dst, sp); + walk_operand(self, &args.count, sp); + if let Operand::Copy(dst) = args.dst { + self.visit_mutate_place(dst, sp); + } + } + + #[inline] + fn visit_set_discriminant(&mut self, dst: Place<'tcx>, _variant: VariantIdx, sp: Span) { + self.visit_mutate_place(dst, sp); + } + + fn visit_statement(&mut self, stmt: &Statement<'tcx>) { + let sp = stmt.source_info.span; + match &stmt.kind { + StatementKind::Assign(stmt) => self.visit_assignment(stmt, sp), + &StatementKind::SetDiscriminant { + ref place, + variant_index, + } => self.visit_set_discriminant(**place, variant_index, sp), + StatementKind::Intrinsic(i) => { + if let NonDivergingIntrinsic::CopyNonOverlapping(copy) = &**i { + self.visit_copy_nonoverlapping(copy, sp); + } + }, + &(StatementKind::StorageLive(local) | StatementKind::StorageDead(local)) => { + // Note: `StorageLive` on a live local fills it with uninit bytes. + self.visit_uninit_local(local, sp); + }, + StatementKind::Deinit(place) => self.visit_uninit_place(**place, sp), + StatementKind::FakeRead(..) + | StatementKind::Retag(..) + | StatementKind::PlaceMention(..) + | StatementKind::AscribeUserType(..) + | StatementKind::Coverage(..) + | StatementKind::ConstEvalCounter + | StatementKind::Nop + | StatementKind::BackwardIncompatibleDropHint { .. } => {}, + } + } + + /// Visits a `Call` terminator. + /// + /// By default this will treat calls to `core::mem::drop` the same as a `Drop` terminator. + #[inline] + fn visit_call(&mut self, func: &Operand<'tcx>, args: &[Spanned>], dst: &Place<'tcx>, sp: Span) { + walk_call(self, func, args, dst, sp); + } + + /// Visits a `TailCall` terminator. + /// + /// By default this will treat calls to `core::mem::drop` the same as a `Drop` terminator. + #[inline] + fn visit_tail_call(&mut self, func: &Operand<'tcx>, args: &[Spanned>], sp: Span) { + walk_tail_call(self, func, args, sp); + } + + #[inline] + fn visit_inline_asm(&mut self, ops: &[InlineAsmOperand<'tcx>], sp: Span) { + walk_inline_asm(self, ops, sp); + } + + fn visit_terminator(&mut self, term: &Terminator<'tcx>) { + let sp = term.source_info.span; + match &term.kind { + TerminatorKind::Assert { cond: value, .. } | TerminatorKind::Yield { value, .. } => { + walk_operand(self, value, sp); + }, + TerminatorKind::Call { + func, + args, + destination, + .. + } => self.visit_call(func, args, destination, sp), + TerminatorKind::TailCall { func, args, .. } => self.visit_tail_call(func, args, sp), + TerminatorKind::InlineAsm { operands, .. } => self.visit_inline_asm(operands, sp), + &TerminatorKind::Drop { place, .. } => self.visit_drop_place(place, sp), + TerminatorKind::Return => self.visit_consume_local(RETURN_PLACE, sp), + TerminatorKind::SwitchInt { discr, .. } => walk_operand(self, discr, sp), + TerminatorKind::Goto { .. } + | TerminatorKind::UnwindResume + | TerminatorKind::UnwindTerminate { .. } + | TerminatorKind::Unreachable + | TerminatorKind::CoroutineDrop + | TerminatorKind::FalseEdge { .. } + | TerminatorKind::FalseUnwind { .. 
} => {}, + } + } + + fn visit_block_data(&mut self, block: &BasicBlockData<'tcx>) { + for stmt in &block.statements { + self.visit_statement(stmt); + } + if let Some(term) = &block.terminator { + self.visit_terminator(term); + } + } +} + +pub trait CopyVisitor<'arena, 'tcx, V: Visitor<'arena, 'tcx>>: core::marker::Copy { + fn copy_range(self, visitor: &mut V, dst: projection::Idx, src: Range, sp: Span); + fn copy_idx(self, visitor: &mut V, dst: projection::Idx, src: projection::Idx, sp: Span); + fn read_src_range(self, visitor: &mut V, range: Range, sp: Span); + fn read_src_idx(self, visitor: &mut V, idx: projection::Idx, sp: Span); +} + +#[derive(Clone, Copy)] +pub struct Move; +impl<'arena, 'tcx, V: Visitor<'arena, 'tcx>> CopyVisitor<'arena, 'tcx, V> for Move { + fn copy_range(self, visitor: &mut V, dst: projection::Idx, src: Range, sp: Span) { + visitor.visit_move_range(dst, src, sp); + } + fn copy_idx(self, visitor: &mut V, dst: projection::Idx, src: projection::Idx, sp: Span) { + visitor.visit_move_idx(dst, src, sp); + } + fn read_src_range(self, visitor: &mut V, range: Range, sp: Span) { + visitor.visit_consume_range(range, sp); + } + fn read_src_idx(self, visitor: &mut V, idx: projection::Idx, sp: Span) { + visitor.visit_consume_idx(idx, sp); + } +} + +#[derive(Clone, Copy)] +pub struct Copy; +impl<'arena, 'tcx, V: Visitor<'arena, 'tcx>> CopyVisitor<'arena, 'tcx, V> for Copy { + fn copy_range(self, visitor: &mut V, dst: projection::Idx, src: Range, sp: Span) { + visitor.visit_copy_range(dst, src, sp); + } + fn copy_idx(self, visitor: &mut V, dst: projection::Idx, src: projection::Idx, sp: Span) { + visitor.visit_copy_idx(dst, src, sp); + } + fn read_src_range(self, visitor: &mut V, range: Range, sp: Span) { + visitor.visit_read_range(range, sp); + } + fn read_src_idx(self, visitor: &mut V, idx: projection::Idx, sp: Span) { + visitor.visit_read_idx(idx, sp); + } +} + +fn copy_aggregate_field<'arena, 'tcx, V: Visitor<'arena, 'tcx>>( + visitor: &mut V, + copy_visitor: impl CopyVisitor<'arena, 'tcx, V>, + dst_start: projection::Idx, + dst_data: &PlaceData<'_>, + src: >::Resolved, + sp: Span, +) { + let (src_start, src_data) = src.values(); + if src_data.contains_values() { + copy_place(visitor, copy_visitor, dst_start, dst_data, src_start, src_data, sp); + } + for idx in src.parents(visitor.resolver()) { + visitor.visit_read_parent(idx, sp); + } +} + +pub fn copy_place_fields<'arena, 'tcx, V: Visitor<'arena, 'tcx>>( + visitor: &mut V, + copy_visitor: impl CopyVisitor<'arena, 'tcx, V>, + dst: projection::Idx, + dst_data: &PlaceData<'_>, + src: projection::Idx, + src_data: &PlaceData<'_>, + sp: Span, +) { + for (dst_field, src_field) in dst_data.fields.iter().zip(src_data.fields) { + let dst_field_start = dst.plus(dst_field.offset as usize); + let src_field_start = src.plus(src_field.offset as usize); + copy_place( + visitor, + copy_visitor, + dst_field_start, + dst_field.data, + src_field_start, + src_field.data, + sp, + ); + } +} + +pub fn copy_place<'arena, 'tcx, V: Visitor<'arena, 'tcx>>( + visitor: &mut V, + copy_visitor: impl CopyVisitor<'arena, 'tcx, V>, + dst: projection::Idx, + dst_data: &PlaceData<'_>, + src: projection::Idx, + src_data: &PlaceData<'_>, + sp: Span, +) { + let src_end = src.plus(src_data.value_count as usize); + if dst_data == src_data { + copy_visitor.copy_range(visitor, dst, src..src_end, sp); + } else if !dst_data.contains_values() { + copy_visitor.read_src_range(visitor, src..src_end, sp); + } else if !src_data.contains_values() { + 
visitor.visit_mutate_range(dst..dst.plus(dst_data.value_count as usize), sp); + } else { + debug_assert_eq!(dst_data.fields.len(), src_data.fields.len()); + match (dst_data.has_value, src_data.has_value) { + (true, true) => copy_visitor.copy_idx(visitor, dst, src, sp), + (true, false) => visitor.visit_mutate_idx(dst, sp), + (false, true) => copy_visitor.read_src_idx(visitor, src, sp), + (false, false) => {}, + } + copy_place_fields(visitor, copy_visitor, dst, dst_data, src, src_data, sp); + } +} + +pub fn walk_operand<'tcx>(visitor: &mut impl Visitor<'_, 'tcx>, op: &Operand<'tcx>, sp: Span) { + match *op { + Operand::Move(place) => visitor.visit_consume_place(place, sp), + Operand::Copy(place) => visitor.visit_read_place(place, sp), + Operand::Constant(_) => {}, + } +} + +/// Walks a `Call` terminator. +/// +/// This will treat calls to `core::mem::drop` the same as a `Drop` terminator. +pub fn walk_call<'tcx>( + visitor: &mut impl Visitor<'_, 'tcx>, + func: &Operand<'tcx>, + args: &[Spanned>], + dst: &Place<'tcx>, + sp: Span, +) { + walk_tail_call(visitor, func, args, sp); + visitor.visit_mutate_place(*dst, sp); +} + +/// Walks a `TailCall` terminator. +/// +/// This will treat calls to `core::mem::drop` the same as a `Drop` terminator. +pub fn walk_tail_call<'tcx>( + visitor: &mut impl Visitor<'_, 'tcx>, + func: &Operand<'tcx>, + args: &[Spanned>], + sp: Span, +) { + if let [arg] = args + && let Operand::Move(arg) = arg.node + && let ty::FnDef(fn_id, _) = *func.ty(visitor.body(), visitor.tcx()).kind() + && visitor.tcx().is_diagnostic_item(sym::mem_drop, fn_id) + { + visitor.visit_drop_place(arg, sp); + } else { + walk_operand(visitor, func, sp); + for arg in args { + walk_operand(visitor, &arg.node, arg.span); + } + } +} + +pub fn walk_inline_asm<'tcx>(visitor: &mut impl Visitor<'_, 'tcx>, operands: &[InlineAsmOperand<'tcx>], sp: Span) { + for op in operands { + if let InlineAsmOperand::In { value, .. } | InlineAsmOperand::InOut { in_value: value, .. } = op { + walk_operand(visitor, value, sp); + } + } + for op in operands { + if let InlineAsmOperand::Out { place: Some(place), .. } + | InlineAsmOperand::InOut { + out_place: Some(place), .. + } = *op + { + visitor.visit_mutate_place(place, sp); + } + } +} diff --git a/tests/dogfood.rs b/tests/dogfood.rs index 92ee08f6a582..e180984c5787 100644 --- a/tests/dogfood.rs +++ b/tests/dogfood.rs @@ -39,6 +39,7 @@ fn dogfood() { "clippy_dev", "clippy_lints_internal", "clippy_lints", + "clippy_mir", "clippy_utils", "clippy_config", "lintcheck",