@@ -150,6 +150,7 @@ struct ProbeSeq {
 }

 impl ProbeSeq {
+    #[inline]
     fn move_next(&mut self, bucket_mask: usize) {
         // We should have found an empty bucket by now and ended the probe.
         debug_assert!(
@@ -621,11 +622,10 @@ impl<T> RawTable<T> {
     fn find_insert_slot(&self, hash: u64) -> usize {
         let mut probe_seq = self.probe_seq(hash);
         loop {
-            let pos = probe_seq.pos;
             unsafe {
-                let group = Group::load(self.ctrl(pos));
+                let group = Group::load(self.ctrl(probe_seq.pos));
                 if let Some(bit) = group.match_empty_or_deleted().lowest_set_bit() {
-                    let result = (pos + bit) & self.bucket_mask;
+                    let result = (probe_seq.pos + bit) & self.bucket_mask;

                     // In tables smaller than the group width, trailing control
                     // bytes outside the range of the table are filled with
@@ -638,7 +638,7 @@ impl<T> RawTable<T> {
                     // control bytes (containing EMPTY).
                     if unlikely(is_full(*self.ctrl(result))) {
                         debug_assert!(self.bucket_mask < Group::WIDTH);
-                        debug_assert_ne!(pos, 0);
+                        debug_assert_ne!(probe_seq.pos, 0);
                         return Group::load_aligned(self.ctrl(0))
                             .match_empty_or_deleted()
                             .lowest_set_bit_nonzero();
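
For context on what `move_next` and `probe_seq.pos` are doing, below is a minimal, self-contained sketch of group-width triangular probing. It is an illustration, not the crate's internals: the `GROUP_WIDTH` constant, the `stride` field, the starting position, and the example table size are assumptions made for the sketch (the real code uses a SIMD-sized `Group` type rather than a plain constant).

// Sketch: group-width triangular probing over a power-of-two table.
// All concrete values here are illustrative assumptions.
const GROUP_WIDTH: usize = 16;

struct ProbeSeq {
    pos: usize,
    stride: usize,
}

impl ProbeSeq {
    #[inline]
    fn move_next(&mut self, bucket_mask: usize) {
        // Triangular probing: the stride grows by one group width each step,
        // so successive positions are start + GROUP_WIDTH * (0, 1, 3, 6, ...).
        // With a power-of-two number of buckets this visits every group
        // before repeating, so the probe cannot cycle without reaching an
        // empty slot.
        self.stride += GROUP_WIDTH;
        self.pos += self.stride;
        self.pos &= bucket_mask;
    }
}

fn main() {
    // Walk the first few probe positions in a hypothetical 64-bucket table,
    // starting from a position derived from some hash.
    let bucket_mask = 64 - 1;
    let mut seq = ProbeSeq { pos: 17 & bucket_mask, stride: 0 };
    for step in 0..4 {
        println!("step {}: probe group starts at bucket {}", step, seq.pos);
        seq.move_next(bucket_mask);
    }
}

The masking with `bucket_mask` is the same trick the diff relies on in `find_insert_slot`: because the table length is a power of two, `& bucket_mask` wraps any position back into range.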