Skip to content

Commit 21dd996

Browse files
committed
Change `to_vec` to `into_vec` and take `&mut AtomicPtr<()>` instead of `&AtomicPtr<()>`
1 parent d1b5d4c commit 21dd996

File tree

2 files changed

+28
-23
lines changed

2 files changed

+28
-23
lines changed

src/bytes.rs

Lines changed: 25 additions & 20 deletions
Original file line numberDiff line numberDiff line change
@@ -110,8 +110,8 @@ pub(crate) struct Vtable {
110110
pub clone: unsafe fn(&AtomicPtr<()>, *const u8, usize) -> Bytes,
111111
/// fn(data, ptr, len)
112112
///
113-
/// takes `Bytes` to value
114-
pub to_vec: unsafe fn(&AtomicPtr<()>, *const u8, usize) -> Vec<u8>,
113+
/// Consumes `Bytes` to return `Vec<u8>`
114+
pub into_vec: unsafe fn(&mut AtomicPtr<()>, *const u8, usize) -> Vec<u8>,
115115
/// fn(data, ptr, len)
116116
pub drop: unsafe fn(&mut AtomicPtr<()>, *const u8, usize),
117117
}
@@ -851,8 +851,8 @@ impl From<String> for Bytes {
851851

852852
impl From<Bytes> for Vec<u8> {
853853
fn from(bytes: Bytes) -> Vec<u8> {
854-
let bytes = mem::ManuallyDrop::new(bytes);
855-
unsafe { (bytes.vtable.to_vec)(&bytes.data, bytes.ptr, bytes.len) }
854+
let bytes = &mut *mem::ManuallyDrop::new(bytes);
855+
unsafe { (bytes.vtable.into_vec)(&mut bytes.data, bytes.ptr, bytes.len) }
856856
}
857857
}
858858

@@ -862,6 +862,7 @@ impl fmt::Debug for Vtable {
862862
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
863863
f.debug_struct("Vtable")
864864
.field("clone", &(self.clone as *const ()))
865+
.field("into_vec", &(self.into_vec as *const ()))
865866
.field("drop", &(self.drop as *const ()))
866867
.finish()
867868
}
@@ -871,7 +872,7 @@ impl fmt::Debug for Vtable {
871872

872873
const STATIC_VTABLE: Vtable = Vtable {
873874
clone: static_clone,
874-
to_vec: static_to_vec,
875+
into_vec: static_into_vec,
875876
drop: static_drop,
876877
};
877878

@@ -880,7 +881,7 @@ unsafe fn static_clone(_: &AtomicPtr<()>, ptr: *const u8, len: usize) -> Bytes {
880881
Bytes::from_static(slice)
881882
}
882883

883-
unsafe fn static_to_vec(_: &AtomicPtr<()>, ptr: *const u8, len: usize) -> Vec<u8> {
884+
unsafe fn static_into_vec(_: &mut AtomicPtr<()>, ptr: *const u8, len: usize) -> Vec<u8> {
884885
let slice = slice::from_raw_parts(ptr, len);
885886
slice.to_vec()
886887
}
@@ -893,13 +894,13 @@ unsafe fn static_drop(_: &mut AtomicPtr<()>, _: *const u8, _: usize) {
893894

894895
static PROMOTABLE_EVEN_VTABLE: Vtable = Vtable {
895896
clone: promotable_even_clone,
896-
to_vec: promotable_even_to_vec,
897+
into_vec: promotable_even_into_vec,
897898
drop: promotable_even_drop,
898899
};
899900

900901
static PROMOTABLE_ODD_VTABLE: Vtable = Vtable {
901902
clone: promotable_odd_clone,
902-
to_vec: promotable_odd_to_vec,
903+
into_vec: promotable_odd_into_vec,
903904
drop: promotable_odd_drop,
904905
};
905906

@@ -916,17 +917,17 @@ unsafe fn promotable_even_clone(data: &AtomicPtr<()>, ptr: *const u8, len: usize
916917
}
917918
}
918919

919-
unsafe fn promotable_to_vec(
920-
data: &AtomicPtr<()>,
920+
unsafe fn promotable_into_vec(
921+
data: &mut AtomicPtr<()>,
921922
ptr: *const u8,
922923
len: usize,
923924
f: fn(*mut ()) -> *mut u8,
924925
) -> Vec<u8> {
925-
let shared = data.load(Ordering::Acquire);
926+
let shared = data.with_mut(|p| *p);
926927
let kind = shared as usize & KIND_MASK;
927928

928929
if kind == KIND_ARC {
929-
shared_to_vec_impl(shared.cast(), ptr, len)
930+
shared_into_vec_impl(shared.cast(), ptr, len)
930931
} else {
931932
// If Bytes holds a Vec, then the offset must be 0.
932933
debug_assert_eq!(kind, KIND_VEC);
@@ -942,8 +943,12 @@ unsafe fn promotable_to_vec(
942943
}
943944
}
944945

945-
unsafe fn promotable_even_to_vec(data: &AtomicPtr<()>, ptr: *const u8, len: usize) -> Vec<u8> {
946-
promotable_to_vec(data, ptr, len, |shared| {
946+
unsafe fn promotable_even_into_vec(
947+
data: &mut AtomicPtr<()>,
948+
ptr: *const u8,
949+
len: usize,
950+
) -> Vec<u8> {
951+
promotable_into_vec(data, ptr, len, |shared| {
947952
ptr_map(shared.cast(), |addr| addr & !KIND_MASK)
948953
})
949954
}
@@ -975,8 +980,8 @@ unsafe fn promotable_odd_clone(data: &AtomicPtr<()>, ptr: *const u8, len: usize)
975980
}
976981
}
977982

978-
unsafe fn promotable_odd_to_vec(data: &AtomicPtr<()>, ptr: *const u8, len: usize) -> Vec<u8> {
979-
promotable_to_vec(data, ptr, len, |shared| shared.cast())
983+
unsafe fn promotable_odd_into_vec(data: &mut AtomicPtr<()>, ptr: *const u8, len: usize) -> Vec<u8> {
984+
promotable_into_vec(data, ptr, len, |shared| shared.cast())
980985
}
981986

982987
unsafe fn promotable_odd_drop(data: &mut AtomicPtr<()>, ptr: *const u8, len: usize) {
@@ -1022,7 +1027,7 @@ const _: [(); 0 - mem::align_of::<Shared>() % 2] = []; // Assert that the alignm
10221027

10231028
static SHARED_VTABLE: Vtable = Vtable {
10241029
clone: shared_clone,
1025-
to_vec: shared_to_vec,
1030+
into_vec: shared_into_vec,
10261031
drop: shared_drop,
10271032
};
10281033

@@ -1035,7 +1040,7 @@ unsafe fn shared_clone(data: &AtomicPtr<()>, ptr: *const u8, len: usize) -> Byte
10351040
shallow_clone_arc(shared as _, ptr, len)
10361041
}
10371042

1038-
unsafe fn shared_to_vec_impl(shared: *mut Shared, ptr: *const u8, len: usize) -> Vec<u8> {
1043+
unsafe fn shared_into_vec_impl(shared: *mut Shared, ptr: *const u8, len: usize) -> Vec<u8> {
10391044
// Check that the ref_cnt is 1 (unique).
10401045
//
10411046
// If it is unique, then it is set to 0 with AcqRel fence for the same
@@ -1064,8 +1069,8 @@ unsafe fn shared_to_vec_impl(shared: *mut Shared, ptr: *const u8, len: usize) ->
10641069
}
10651070
}
10661071

1067-
unsafe fn shared_to_vec(data: &AtomicPtr<()>, ptr: *const u8, len: usize) -> Vec<u8> {
1068-
shared_to_vec_impl(data.load(Ordering::Relaxed).cast(), ptr, len)
1072+
unsafe fn shared_into_vec(data: &mut AtomicPtr<()>, ptr: *const u8, len: usize) -> Vec<u8> {
1073+
shared_into_vec_impl((data.with_mut(|p| *p)).cast(), ptr, len)
10691074
}
10701075

10711076
unsafe fn shared_drop(data: &mut AtomicPtr<()>, _ptr: *const u8, _len: usize) {

src/bytes_mut.rs

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -1675,7 +1675,7 @@ unsafe fn rebuild_vec(ptr: *mut u8, mut len: usize, mut cap: usize, off: usize)
16751675

16761676
static SHARED_VTABLE: Vtable = Vtable {
16771677
clone: shared_v_clone,
1678-
to_vec: shared_v_to_vec,
1678+
into_vec: shared_v_into_vec,
16791679
drop: shared_v_drop,
16801680
};
16811681

@@ -1687,8 +1687,8 @@ unsafe fn shared_v_clone(data: &AtomicPtr<()>, ptr: *const u8, len: usize) -> By
16871687
Bytes::with_vtable(ptr, len, data, &SHARED_VTABLE)
16881688
}
16891689

1690-
unsafe fn shared_v_to_vec(data: &AtomicPtr<()>, ptr: *const u8, len: usize) -> Vec<u8> {
1691-
let shared: *mut Shared = data.load(Ordering::Relaxed).cast();
1690+
unsafe fn shared_v_into_vec(data: &mut AtomicPtr<()>, ptr: *const u8, len: usize) -> Vec<u8> {
1691+
let shared: *mut Shared = (data.with_mut(|p| *p)).cast();
16921692

16931693
if (*shared).is_unique() {
16941694
let shared = &mut *shared;

0 commit comments

Comments (0)