Skip to content

Commit aaa7964

Browse files
committed
Fix issues pointed out in PR
* Change order of type parameters * Handle null case for `alloc` * Run rustfmt
1 parent 78945d0 commit aaa7964

File tree

3 files changed

+61
-57
lines changed

3 files changed

+61
-57
lines changed

src/map.rs

Lines changed: 6 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,4 @@
1-
use crate::raw::{Bucket, RawDrain, RawIntoIter, RawIter, RawTable, Global};
1+
use crate::raw::{Bucket, Global, RawDrain, RawIntoIter, RawIter, RawTable};
22
use crate::CollectionAllocErr;
33
use core::borrow::Borrow;
44
use core::fmt::{self, Debug};
@@ -193,7 +193,7 @@ pub enum DefaultHashBuilder {}
193193
#[derive(Clone)]
194194
pub struct HashMap<K, V, S = DefaultHashBuilder> {
195195
pub(crate) hash_builder: S,
196-
pub(crate) table: RawTable<Global, (K, V)>,
196+
pub(crate) table: RawTable<(K, V), Global>,
197197
}
198198

199199
#[cfg_attr(feature = "inline-more", inline)]
@@ -1146,7 +1146,7 @@ impl<K, V> IterMut<'_, K, V> {
11461146
/// [`into_iter`]: struct.HashMap.html#method.into_iter
11471147
/// [`HashMap`]: struct.HashMap.html
11481148
pub struct IntoIter<K, V> {
1149-
inner: RawIntoIter<Global, (K, V)>,
1149+
inner: RawIntoIter<(K, V), Global>,
11501150
}
11511151

11521152
impl<K, V> IntoIter<K, V> {
@@ -1222,7 +1222,7 @@ impl<K, V: Debug> fmt::Debug for Values<'_, K, V> {
12221222
/// [`drain`]: struct.HashMap.html#method.drain
12231223
/// [`HashMap`]: struct.HashMap.html
12241224
pub struct Drain<'a, K, V> {
1225-
inner: RawDrain<'a, Global, (K, V)>,
1225+
inner: RawDrain<'a, (K, V), Global>,
12261226
}
12271227

12281228
impl<K, V> Drain<'_, K, V> {
@@ -1280,7 +1280,7 @@ pub enum RawEntryMut<'a, K, V, S> {
12801280
/// [`RawEntryMut`]: enum.RawEntryMut.html
12811281
pub struct RawOccupiedEntryMut<'a, K, V> {
12821282
elem: Bucket<(K, V)>,
1283-
table: &'a mut RawTable<Global, (K, V)>,
1283+
table: &'a mut RawTable<(K, V), Global>,
12841284
}
12851285

12861286
unsafe impl<K, V> Send for RawOccupiedEntryMut<'_, K, V>
@@ -1301,7 +1301,7 @@ where
13011301
///
13021302
/// [`RawEntryMut`]: enum.RawEntryMut.html
13031303
pub struct RawVacantEntryMut<'a, K, V, S> {
1304-
table: &'a mut RawTable<Global, (K, V)>,
1304+
table: &'a mut RawTable<(K, V), Global>,
13051305
hash_builder: &'a S,
13061306
}
13071307

src/raw/alloc.rs

Lines changed: 2 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -7,8 +7,8 @@ mod inner {
77

88
#[cfg(not(feature = "nightly"))]
99
mod inner {
10+
use crate::alloc::alloc::{alloc, dealloc, Layout};
1011
use core::ptr::NonNull;
11-
use crate::alloc::alloc::{Layout, alloc, dealloc};
1212

1313
pub trait Alloc {
1414
unsafe fn alloc(&mut self, layout: Layout) -> Result<NonNull<u8>, ()>;
@@ -19,11 +19,10 @@ mod inner {
1919
pub struct Global;
2020
impl Alloc for Global {
2121
unsafe fn alloc(&mut self, layout: Layout) -> Result<NonNull<u8>, ()> {
22-
Ok(NonNull::new_unchecked(alloc(layout)))
22+
NonNull::new(alloc(layout)).ok_or(())
2323
}
2424
unsafe fn dealloc(&mut self, ptr: NonNull<u8>, layout: Layout) {
2525
dealloc(ptr.as_ptr(), layout)
2626
}
2727
}
2828
}
29-

src/raw/mod.rs

Lines changed: 53 additions & 48 deletions
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,4 @@
1-
use crate::alloc::alloc::{Layout, handle_alloc_error};
1+
use crate::alloc::alloc::{handle_alloc_error, Layout};
22
use crate::scopeguard::guard;
33
use crate::CollectionAllocErr;
44
use core::hint;
@@ -32,8 +32,8 @@ cfg_if! {
3232
}
3333

3434
mod alloc;
35-
pub use self::alloc::Global;
3635
use self::alloc::Alloc;
36+
pub use self::alloc::Global;
3737

3838
mod bitmask;
3939

@@ -332,7 +332,7 @@ impl<T> Bucket<T> {
332332
}
333333

334334
/// A raw hash table with an unsafe API.
335-
pub struct RawTable<A: Alloc + Copy, T> {
335+
pub struct RawTable<T, A: Alloc + Clone> {
336336
// Mask to get an index from a hash value. The value is one less than the
337337
// number of buckets in the table.
338338
bucket_mask: usize,
@@ -355,16 +355,14 @@ pub struct RawTable<A: Alloc + Copy, T> {
355355
alloc: A,
356356
}
357357

358-
impl<A: Alloc + Copy, T> RawTable<A, T> {
358+
impl<T, A: Alloc + Clone> RawTable<T, A> {
359359
/// Creates a new empty hash table without allocating any memory.
360360
///
361361
/// In effect this returns a table with exactly 1 bucket. However we can
362362
/// leave the data pointer dangling since that bucket is never written to
363363
/// due to our load factor forcing us to always have at least 1 free bucket.
364364
#[cfg_attr(feature = "inline-more", inline)]
365-
pub fn new(
366-
alloc: A,
367-
) -> Self {
365+
pub fn new(alloc: A) -> Self {
368366
Self {
369367
data: NonNull::dangling(),
370368
// Be careful to cast the entire slice to a raw pointer.
@@ -389,7 +387,8 @@ impl<A: Alloc + Copy, T> RawTable<A, T> {
389387
debug_assert!(buckets.is_power_of_two());
390388
let (layout, data_offset) =
391389
calculate_layout::<T>(buckets).ok_or_else(|| fallability.capacity_overflow())?;
392-
let ctrl = alloc.alloc(layout)
390+
let ctrl = alloc
391+
.alloc(layout)
393392
.map_err(|_| fallability.alloc_err(layout))?;
394393
let data = NonNull::new_unchecked(ctrl.as_ptr().add(data_offset) as *mut T);
395394
Ok(Self {
@@ -436,7 +435,8 @@ impl<A: Alloc + Copy, T> RawTable<A, T> {
436435
unsafe fn free_buckets(&mut self) {
437436
let (layout, _) =
438437
calculate_layout::<T>(self.buckets()).unwrap_or_else(|| hint::unreachable_unchecked());
439-
self.alloc.dealloc(NonNull::new_unchecked(self.ctrl.as_ptr()), layout);
438+
self.alloc
439+
.dealloc(NonNull::new_unchecked(self.ctrl.as_ptr()), layout);
440440
}
441441

442442
/// Returns the index of a bucket from a `Bucket`.
@@ -605,7 +605,7 @@ impl<A: Alloc + Copy, T> RawTable<A, T> {
605605
// space for.
606606
let min_size = usize::max(self.items, min_size);
607607
if min_size == 0 {
608-
*self = Self::new(self.alloc);
608+
*self = Self::new(self.alloc.clone());
609609
return;
610610
}
611611

@@ -622,7 +622,7 @@ impl<A: Alloc + Copy, T> RawTable<A, T> {
622622
if min_buckets < self.buckets() {
623623
// Fast path if the table is empty
624624
if self.items == 0 {
625-
*self = Self::with_capacity(self.alloc, min_size)
625+
*self = Self::with_capacity(self.alloc.clone(), min_size)
626626
} else {
627627
self.resize(min_size, hasher, Fallibility::Infallible)
628628
.unwrap_or_else(|_| unsafe { hint::unreachable_unchecked() });
@@ -797,7 +797,7 @@ impl<A: Alloc + Copy, T> RawTable<A, T> {
797797
debug_assert!(self.items <= capacity);
798798

799799
// Allocate and initialize the new table.
800-
let mut new_table = Self::try_with_capacity(self.alloc, capacity, fallability)?;
800+
let mut new_table = Self::try_with_capacity(self.alloc.clone(), capacity, fallability)?;
801801
new_table.growth_left -= self.items;
802802
new_table.items = self.items;
803803

@@ -962,10 +962,10 @@ impl<A: Alloc + Copy, T> RawTable<A, T> {
962962
/// outlives the `RawDrain`. Because we cannot make the `next` method unsafe
963963
/// on the `RawDrain`, we have to make the `drain` method unsafe.
964964
#[cfg_attr(feature = "inline-more", inline)]
965-
pub unsafe fn drain(&mut self) -> RawDrain<'_, A, T> {
965+
pub unsafe fn drain(&mut self) -> RawDrain<'_, T, A> {
966966
RawDrain {
967967
iter: self.iter(),
968-
table: ManuallyDrop::new(mem::replace(self, Self::new(self.alloc))),
968+
table: ManuallyDrop::new(mem::replace(self, Self::new(self.alloc.clone()))),
969969
orig_table: NonNull::from(self),
970970
marker: PhantomData,
971971
}
@@ -987,18 +987,22 @@ impl<A: Alloc + Copy, T> RawTable<A, T> {
987987
}
988988
}
989989

990-
unsafe impl<A: Alloc + Copy, T> Send for RawTable<A, T> where T: Send {}
991-
unsafe impl<A: Alloc + Copy, T> Sync for RawTable<A, T> where T: Sync {}
990+
unsafe impl<T, A: Alloc + Clone> Send for RawTable<T, A> where T: Send {}
991+
unsafe impl<T, A: Alloc + Clone> Sync for RawTable<T, A> where T: Sync {}
992992

993-
impl<A: Alloc + Copy, T: Clone> Clone for RawTable<A, T> {
993+
impl<T: Clone, A: Alloc + Clone> Clone for RawTable<T, A> {
994994
fn clone(&self) -> Self {
995995
if self.is_empty_singleton() {
996-
Self::new(self.alloc)
996+
Self::new(self.alloc.clone())
997997
} else {
998998
unsafe {
999999
let mut new_table = ManuallyDrop::new(
1000-
Self::new_uninitialized(self.alloc, self.buckets(), Fallibility::Infallible)
1001-
.unwrap_or_else(|_| hint::unreachable_unchecked()),
1000+
Self::new_uninitialized(
1001+
self.alloc.clone(),
1002+
self.buckets(),
1003+
Fallibility::Infallible,
1004+
)
1005+
.unwrap_or_else(|_| hint::unreachable_unchecked()),
10021006
);
10031007

10041008
// Copy the control bytes unchanged. We do this in a single pass
@@ -1043,7 +1047,7 @@ impl<A: Alloc + Copy, T: Clone> Clone for RawTable<A, T> {
10431047
}
10441048

10451049
#[cfg(feature = "nightly")]
1046-
unsafe impl<A: Alloc + Copy, #[may_dangle] T> Drop for RawTable<A, T> {
1050+
unsafe impl<#[may_dangle] T, A: Alloc + Clone> Drop for RawTable<T, A> {
10471051
#[cfg_attr(feature = "inline-more", inline)]
10481052
fn drop(&mut self) {
10491053
if !self.is_empty_singleton() {
@@ -1059,7 +1063,7 @@ unsafe impl<A: Alloc + Copy, #[may_dangle] T> Drop for RawTable<A, T> {
10591063
}
10601064
}
10611065
#[cfg(not(feature = "nightly"))]
1062-
impl<A: Alloc + Copy, T> Drop for RawTable<A, T> {
1066+
impl<T, A: Alloc + Clone> Drop for RawTable<T, A> {
10631067
#[cfg_attr(feature = "inline-more", inline)]
10641068
fn drop(&mut self) {
10651069
if !self.is_empty_singleton() {
@@ -1075,14 +1079,14 @@ impl<A: Alloc + Copy, T> Drop for RawTable<A, T> {
10751079
}
10761080
}
10771081

1078-
impl<A: Alloc + Copy, T> IntoIterator for RawTable<A, T> {
1082+
impl<T, A: Alloc + Clone> IntoIterator for RawTable<T, A> {
10791083
type Item = T;
1080-
type IntoIter = RawIntoIter<A, T>;
1084+
type IntoIter = RawIntoIter<T, A>;
10811085

10821086
#[cfg_attr(feature = "inline-more", inline)]
1083-
fn into_iter(self) -> RawIntoIter<A, T> {
1087+
fn into_iter(self) -> RawIntoIter<T, A> {
10841088
unsafe {
1085-
let allocator = self.alloc;
1089+
let allocator = self.alloc.clone();
10861090
let iter = self.iter();
10871091
let alloc = self.into_alloc();
10881092
RawIntoIter {
@@ -1277,25 +1281,25 @@ impl<T> ExactSizeIterator for RawIter<T> {}
12771281
impl<T> FusedIterator for RawIter<T> {}
12781282

12791283
/// Iterator which consumes a table and returns elements.
1280-
pub struct RawIntoIter<A: Alloc + Copy, T> {
1284+
pub struct RawIntoIter<T, A: Alloc + Clone> {
12811285
iter: RawIter<T>,
12821286
alloc: Option<(NonNull<u8>, Layout)>,
12831287
marker: PhantomData<T>,
12841288
allocator: A,
12851289
}
12861290

1287-
impl<A: Alloc + Copy, T> RawIntoIter<A, T> {
1291+
impl<T, A: Alloc + Clone> RawIntoIter<T, A> {
12881292
#[cfg_attr(feature = "inline-more", inline)]
12891293
pub fn iter(&self) -> RawIter<T> {
12901294
self.iter.clone()
12911295
}
12921296
}
12931297

1294-
unsafe impl<A: Alloc + Copy, T> Send for RawIntoIter<A, T> where T: Send {}
1295-
unsafe impl<A: Alloc + Copy, T> Sync for RawIntoIter<A, T> where T: Sync {}
1298+
unsafe impl<T, A: Alloc + Clone> Send for RawIntoIter<T, A> where T: Send {}
1299+
unsafe impl<T, A: Alloc + Clone> Sync for RawIntoIter<T, A> where T: Sync {}
12961300

12971301
#[cfg(feature = "nightly")]
1298-
unsafe impl<A: Alloc + Copy, #[may_dangle] T> Drop for RawIntoIter<A, T> {
1302+
unsafe impl<#[may_dangle] T, A: Alloc + Clone> Drop for RawIntoIter<T, A> {
12991303
#[cfg_attr(feature = "inline-more", inline)]
13001304
fn drop(&mut self) {
13011305
unsafe {
@@ -1314,7 +1318,7 @@ unsafe impl<A: Alloc + Copy, #[may_dangle] T> Drop for RawIntoIter<A, T> {
13141318
}
13151319
}
13161320
#[cfg(not(feature = "nightly"))]
1317-
impl<A: Alloc + Copy, T> Drop for RawIntoIter<A, T> {
1321+
impl<T, A: Alloc + Clone> Drop for RawIntoIter<T, A> {
13181322
#[cfg_attr(feature = "inline-more", inline)]
13191323
fn drop(&mut self) {
13201324
unsafe {
@@ -1327,13 +1331,14 @@ impl<A: Alloc + Copy, T> Drop for RawIntoIter<A, T> {
13271331

13281332
// Free the table
13291333
if let Some((ptr, layout)) = self.alloc {
1330-
self.allocator.dealloc(NonNull::new_unchecked(ptr.as_ptr()), layout);
1334+
self.allocator
1335+
.dealloc(NonNull::new_unchecked(ptr.as_ptr()), layout);
13311336
}
13321337
}
13331338
}
13341339
}
13351340

1336-
impl<A: Alloc+ Copy, T> Iterator for RawIntoIter<A, T> {
1341+
impl<T, A: Alloc + Clone> Iterator for RawIntoIter<T, A> {
13371342
type Item = T;
13381343

13391344
#[cfg_attr(feature = "inline-more", inline)]
@@ -1347,35 +1352,35 @@ impl<A: Alloc+ Copy, T> Iterator for RawIntoIter<A, T> {
13471352
}
13481353
}
13491354

1350-
impl<A: Alloc + Copy, T> ExactSizeIterator for RawIntoIter<A, T> {}
1351-
impl<A: Alloc + Copy, T> FusedIterator for RawIntoIter<A, T> {}
1355+
impl<T, A: Alloc + Clone> ExactSizeIterator for RawIntoIter<T, A> {}
1356+
impl<T, A: Alloc + Clone> FusedIterator for RawIntoIter<T, A> {}
13521357

13531358
/// Iterator which consumes elements without freeing the table storage.
1354-
pub struct RawDrain<'a, A: Alloc + Copy, T> {
1359+
pub struct RawDrain<'a, T, A: Alloc + Clone> {
13551360
iter: RawIter<T>,
13561361

13571362
// The table is moved into the iterator for the duration of the drain. This
13581363
// ensures that an empty table is left if the drain iterator is leaked
13591364
// without dropping.
1360-
table: ManuallyDrop<RawTable<A, T>>,
1361-
orig_table: NonNull<RawTable<A, T>>,
1365+
table: ManuallyDrop<RawTable<T, A>>,
1366+
orig_table: NonNull<RawTable<T, A>>,
13621367

13631368
// We don't use a &'a mut RawTable<T> because we want RawDrain to be
13641369
// covariant over T.
1365-
marker: PhantomData<&'a RawTable<A, T>>,
1370+
marker: PhantomData<&'a RawTable<T, A>>,
13661371
}
13671372

1368-
impl<A: Alloc + Copy, T> RawDrain<'_, A, T> {
1373+
impl<T, A: Alloc + Clone> RawDrain<'_, T, A> {
13691374
#[cfg_attr(feature = "inline-more", inline)]
1370-
pub fn iter(&self) -> RawIter< T> {
1375+
pub fn iter(&self) -> RawIter<T> {
13711376
self.iter.clone()
13721377
}
13731378
}
13741379

1375-
unsafe impl<A: Alloc + Copy, T> Send for RawDrain<'_, A, T> where T: Send {}
1376-
unsafe impl<A: Alloc + Copy, T> Sync for RawDrain<'_, A, T> where T: Sync {}
1380+
unsafe impl<T, A: Alloc + Clone> Send for RawDrain<'_, T, A> where T: Send {}
1381+
unsafe impl<T, A: Alloc + Clone> Sync for RawDrain<'_, T, A> where T: Sync {}
13771382

1378-
impl<A: Alloc + Copy, T> Drop for RawDrain<'_, A, T> {
1383+
impl<T, A: Alloc + Clone> Drop for RawDrain<'_, T, A> {
13791384
#[cfg_attr(feature = "inline-more", inline)]
13801385
fn drop(&mut self) {
13811386
unsafe {
@@ -1398,7 +1403,7 @@ impl<A: Alloc + Copy, T> Drop for RawDrain<'_, A, T> {
13981403
}
13991404
}
14001405

1401-
impl<A: Alloc + Copy, T> Iterator for RawDrain<'_, A, T> {
1406+
impl<T, A: Alloc + Clone> Iterator for RawDrain<'_, T, A> {
14021407
type Item = T;
14031408

14041409
#[cfg_attr(feature = "inline-more", inline)]
@@ -1415,5 +1420,5 @@ impl<A: Alloc + Copy, T> Iterator for RawDrain<'_, A, T> {
14151420
}
14161421
}
14171422

1418-
impl<A: Alloc + Copy, T> ExactSizeIterator for RawDrain<'_, A, T> {}
1419-
impl<A: Alloc + Copy, T> FusedIterator for RawDrain<'_, A, T> {}
1423+
impl<T, A: Alloc + Clone> ExactSizeIterator for RawDrain<'_, T, A> {}
1424+
impl<T, A: Alloc + Clone> FusedIterator for RawDrain<'_, T, A> {}

0 commit comments

Comments
 (0)