Skip to content

Commit ea44b1c

Browse files
NobodyXu authored and Darksonn committed
Add conversion from Bytes to Vec<u8> (tokio-rs#547)
Signed-off-by: Jiahao XU <Jiahao_XU@outlook.com> Co-authored-by: Alice Ryhl <aliceryhl@google.com>
1 parent 5348f55 commit ea44b1c

File tree

5 files changed

+240
-0
lines changed

5 files changed

+240
-0
lines changed

src/bytes.rs

Lines changed: 89 additions & 0 deletions
Original file line number | Diff line number | Diff line change
@@ -109,6 +109,10 @@ pub(crate) struct Vtable {
109109
/// fn(data, ptr, len)
110110
pub clone: unsafe fn(&AtomicPtr<()>, *const u8, usize) -> Bytes,
111111
/// fn(data, ptr, len)
112+
///
113+
/// takes `Bytes` to value
114+
pub to_vec: unsafe fn(&AtomicPtr<()>, *const u8, usize) -> Vec<u8>,
115+
/// fn(data, ptr, len)
112116
pub drop: unsafe fn(&mut AtomicPtr<()>, *const u8, usize),
113117
}
114118

@@ -845,6 +849,13 @@ impl From<String> for Bytes {
845849
}
846850
}
847851

852+
impl From<Bytes> for Vec<u8> {
853+
fn from(bytes: Bytes) -> Vec<u8> {
854+
let bytes = mem::ManuallyDrop::new(bytes);
855+
unsafe { (bytes.vtable.to_vec)(&bytes.data, bytes.ptr, bytes.len) }
856+
}
857+
}
858+
848859
// ===== impl Vtable =====
849860

850861
impl fmt::Debug for Vtable {
@@ -860,6 +871,7 @@ impl fmt::Debug for Vtable {
860871

861872
const STATIC_VTABLE: Vtable = Vtable {
862873
clone: static_clone,
874+
to_vec: static_to_vec,
863875
drop: static_drop,
864876
};
865877

@@ -868,6 +880,11 @@ unsafe fn static_clone(_: &AtomicPtr<()>, ptr: *const u8, len: usize) -> Bytes {
868880
Bytes::from_static(slice)
869881
}
870882

883+
unsafe fn static_to_vec(_: &AtomicPtr<()>, ptr: *const u8, len: usize) -> Vec<u8> {
884+
let slice = slice::from_raw_parts(ptr, len);
885+
slice.to_vec()
886+
}
887+
871888
unsafe fn static_drop(_: &mut AtomicPtr<()>, _: *const u8, _: usize) {
872889
// nothing to drop for &'static [u8]
873890
}
@@ -876,11 +893,13 @@ unsafe fn static_drop(_: &mut AtomicPtr<()>, _: *const u8, _: usize) {
876893

877894
static PROMOTABLE_EVEN_VTABLE: Vtable = Vtable {
878895
clone: promotable_even_clone,
896+
to_vec: promotable_even_to_vec,
879897
drop: promotable_even_drop,
880898
};
881899

882900
static PROMOTABLE_ODD_VTABLE: Vtable = Vtable {
883901
clone: promotable_odd_clone,
902+
to_vec: promotable_odd_to_vec,
884903
drop: promotable_odd_drop,
885904
};
886905

@@ -897,6 +916,38 @@ unsafe fn promotable_even_clone(data: &AtomicPtr<()>, ptr: *const u8, len: usize
897916
}
898917
}
899918

919+
unsafe fn promotable_to_vec(
920+
data: &AtomicPtr<()>,
921+
ptr: *const u8,
922+
len: usize,
923+
f: fn(*mut ()) -> *mut u8,
924+
) -> Vec<u8> {
925+
let shared = data.load(Ordering::Acquire);
926+
let kind = shared as usize & KIND_MASK;
927+
928+
if kind == KIND_ARC {
929+
shared_to_vec_impl(shared.cast(), ptr, len)
930+
} else {
931+
// If Bytes holds a Vec, then the offset must be 0.
932+
debug_assert_eq!(kind, KIND_VEC);
933+
934+
let buf = f(shared);
935+
936+
let cap = (ptr as usize - buf as usize) + len;
937+
938+
// Copy back buffer
939+
ptr::copy(ptr, buf, len);
940+
941+
Vec::from_raw_parts(buf, len, cap)
942+
}
943+
}
944+
945+
unsafe fn promotable_even_to_vec(data: &AtomicPtr<()>, ptr: *const u8, len: usize) -> Vec<u8> {
946+
promotable_to_vec(data, ptr, len, |shared| {
947+
ptr_map(shared.cast(), |addr| addr & !KIND_MASK)
948+
})
949+
}
950+
900951
unsafe fn promotable_even_drop(data: &mut AtomicPtr<()>, ptr: *const u8, len: usize) {
901952
data.with_mut(|shared| {
902953
let shared = *shared;
@@ -924,6 +975,10 @@ unsafe fn promotable_odd_clone(data: &AtomicPtr<()>, ptr: *const u8, len: usize)
924975
}
925976
}
926977

978+
unsafe fn promotable_odd_to_vec(data: &AtomicPtr<()>, ptr: *const u8, len: usize) -> Vec<u8> {
979+
promotable_to_vec(data, ptr, len, |shared| shared.cast())
980+
}
981+
927982
unsafe fn promotable_odd_drop(data: &mut AtomicPtr<()>, ptr: *const u8, len: usize) {
928983
data.with_mut(|shared| {
929984
let shared = *shared;
@@ -967,6 +1022,7 @@ const _: [(); 0 - mem::align_of::<Shared>() % 2] = []; // Assert that the alignm
9671022

9681023
static SHARED_VTABLE: Vtable = Vtable {
9691024
clone: shared_clone,
1025+
to_vec: shared_to_vec,
9701026
drop: shared_drop,
9711027
};
9721028

@@ -979,6 +1035,39 @@ unsafe fn shared_clone(data: &AtomicPtr<()>, ptr: *const u8, len: usize) -> Byte
9791035
shallow_clone_arc(shared as _, ptr, len)
9801036
}
9811037

1038+
unsafe fn shared_to_vec_impl(shared: *mut Shared, ptr: *const u8, len: usize) -> Vec<u8> {
1039+
// Check that the ref_cnt is 1 (unique).
1040+
//
1041+
// If it is unique, then it is set to 0 with AcqRel fence for the same
1042+
// reason in release_shared.
1043+
//
1044+
// Otherwise, we take the other branch and call release_shared.
1045+
if (*shared)
1046+
.ref_cnt
1047+
.compare_exchange(1, 0, Ordering::AcqRel, Ordering::Relaxed)
1048+
.is_ok()
1049+
{
1050+
let buf = (*shared).buf;
1051+
let cap = (*shared).cap;
1052+
1053+
// Deallocate Shared
1054+
drop(Box::from_raw(shared as *mut mem::ManuallyDrop<Shared>));
1055+
1056+
// Copy back buffer
1057+
ptr::copy(ptr, buf, len);
1058+
1059+
Vec::from_raw_parts(buf, len, cap)
1060+
} else {
1061+
let v = slice::from_raw_parts(ptr, len).to_vec();
1062+
release_shared(shared);
1063+
v
1064+
}
1065+
}
1066+
1067+
unsafe fn shared_to_vec(data: &AtomicPtr<()>, ptr: *const u8, len: usize) -> Vec<u8> {
1068+
shared_to_vec_impl(data.load(Ordering::Relaxed).cast(), ptr, len)
1069+
}
1070+
9821071
unsafe fn shared_drop(data: &mut AtomicPtr<()>, _ptr: *const u8, _len: usize) {
9831072
data.with_mut(|shared| {
9841073
release_shared(shared.cast());

src/bytes_mut.rs

Lines changed: 23 additions & 0 deletions
Original file line number | Diff line number | Diff line change
@@ -1611,6 +1611,7 @@ unsafe fn rebuild_vec(ptr: *mut u8, mut len: usize, mut cap: usize, off: usize)
16111611

16121612
static SHARED_VTABLE: Vtable = Vtable {
16131613
clone: shared_v_clone,
1614+
to_vec: shared_v_to_vec,
16141615
drop: shared_v_drop,
16151616
};
16161617

@@ -1622,6 +1623,28 @@ unsafe fn shared_v_clone(data: &AtomicPtr<()>, ptr: *const u8, len: usize) -> By
16221623
Bytes::with_vtable(ptr, len, data, &SHARED_VTABLE)
16231624
}
16241625

1626+
unsafe fn shared_v_to_vec(data: &AtomicPtr<()>, ptr: *const u8, len: usize) -> Vec<u8> {
1627+
let shared: *mut Shared = data.load(Ordering::Relaxed).cast();
1628+
1629+
if (*shared).is_unique() {
1630+
let shared = &mut *shared;
1631+
1632+
// Drop shared
1633+
let mut vec = mem::replace(&mut shared.vec, Vec::new());
1634+
release_shared(shared);
1635+
1636+
// Copy back buffer
1637+
ptr::copy(ptr, vec.as_mut_ptr(), len);
1638+
vec.set_len(len);
1639+
1640+
vec
1641+
} else {
1642+
let v = slice::from_raw_parts(ptr, len).to_vec();
1643+
release_shared(shared);
1644+
v
1645+
}
1646+
}
1647+
16251648
unsafe fn shared_v_drop(data: &mut AtomicPtr<()>, _ptr: *const u8, _len: usize) {
16261649
data.with_mut(|shared| {
16271650
release_shared(*shared as *mut Shared);

tests/test_bytes.rs

Lines changed: 70 additions & 0 deletions
Original file line number | Diff line number | Diff line change
@@ -1066,3 +1066,73 @@ fn bytes_into_vec() {
10661066
let vec: Vec<u8> = bytes.into();
10671067
assert_eq!(&vec, prefix);
10681068
}
1069+
1070+
#[test]
1071+
fn test_bytes_into_vec() {
1072+
// Test STATIC_VTABLE.to_vec
1073+
let bs = b"1b23exfcz3r";
1074+
let vec: Vec<u8> = Bytes::from_static(bs).into();
1075+
assert_eq!(&*vec, bs);
1076+
1077+
// Test bytes_mut.SHARED_VTABLE.to_vec impl
1078+
eprintln!("1");
1079+
let mut bytes_mut: BytesMut = bs[..].into();
1080+
1081+
// Set kind to KIND_ARC so that after freeze, Bytes will use bytes_mut.SHARED_VTABLE
1082+
eprintln!("2");
1083+
drop(bytes_mut.split_off(bs.len()));
1084+
1085+
eprintln!("3");
1086+
let b1 = bytes_mut.freeze();
1087+
eprintln!("4");
1088+
let b2 = b1.clone();
1089+
1090+
eprintln!("{:#?}", (&*b1).as_ptr());
1091+
1092+
// shared.is_unique() = False
1093+
eprintln!("5");
1094+
assert_eq!(&*Vec::from(b2), bs);
1095+
1096+
// shared.is_unique() = True
1097+
eprintln!("6");
1098+
assert_eq!(&*Vec::from(b1), bs);
1099+
1100+
// Test bytes_mut.SHARED_VTABLE.to_vec impl where offset != 0
1101+
let mut bytes_mut1: BytesMut = bs[..].into();
1102+
let bytes_mut2 = bytes_mut1.split_off(9);
1103+
1104+
let b1 = bytes_mut1.freeze();
1105+
let b2 = bytes_mut2.freeze();
1106+
1107+
assert_eq!(Vec::from(b2), bs[9..]);
1108+
assert_eq!(Vec::from(b1), bs[..9]);
1109+
}
1110+
1111+
#[test]
1112+
fn test_bytes_into_vec_promotable_even() {
1113+
let vec = vec![33u8; 1024];
1114+
1115+
// Test cases where kind == KIND_VEC
1116+
let b1 = Bytes::from(vec.clone());
1117+
assert_eq!(Vec::from(b1), vec);
1118+
1119+
// Test cases where kind == KIND_ARC, ref_cnt == 1
1120+
let b1 = Bytes::from(vec.clone());
1121+
drop(b1.clone());
1122+
assert_eq!(Vec::from(b1), vec);
1123+
1124+
// Test cases where kind == KIND_ARC, ref_cnt == 2
1125+
let b1 = Bytes::from(vec.clone());
1126+
let b2 = b1.clone();
1127+
assert_eq!(Vec::from(b1), vec);
1128+
1129+
// Test cases where vtable = SHARED_VTABLE, kind == KIND_ARC, ref_cnt == 1
1130+
assert_eq!(Vec::from(b2), vec);
1131+
1132+
// Test cases where offset != 0
1133+
let mut b1 = Bytes::from(vec.clone());
1134+
let b2 = b1.split_off(20);
1135+
1136+
assert_eq!(Vec::from(b2), vec[20..]);
1137+
assert_eq!(Vec::from(b1), vec[..20]);
1138+
}

tests/test_bytes_odd_alloc.rs

Lines changed: 29 additions & 0 deletions
Original file line number | Diff line number | Diff line change
@@ -67,3 +67,32 @@ fn test_bytes_clone_drop() {
6767
let b1 = Bytes::from(vec);
6868
let _b2 = b1.clone();
6969
}
70+
71+
#[test]
72+
fn test_bytes_into_vec() {
73+
let vec = vec![33u8; 1024];
74+
75+
// Test cases where kind == KIND_VEC
76+
let b1 = Bytes::from(vec.clone());
77+
assert_eq!(Vec::from(b1), vec);
78+
79+
// Test cases where kind == KIND_ARC, ref_cnt == 1
80+
let b1 = Bytes::from(vec.clone());
81+
drop(b1.clone());
82+
assert_eq!(Vec::from(b1), vec);
83+
84+
// Test cases where kind == KIND_ARC, ref_cnt == 2
85+
let b1 = Bytes::from(vec.clone());
86+
let b2 = b1.clone();
87+
assert_eq!(Vec::from(b1), vec);
88+
89+
// Test cases where vtable = SHARED_VTABLE, kind == KIND_ARC, ref_cnt == 1
90+
assert_eq!(Vec::from(b2), vec);
91+
92+
// Test cases where offset != 0
93+
let mut b1 = Bytes::from(vec.clone());
94+
let b2 = b1.split_off(20);
95+
96+
assert_eq!(Vec::from(b2), vec[20..]);
97+
assert_eq!(Vec::from(b1), vec[..20]);
98+
}

tests/test_bytes_vec_alloc.rs

Lines changed: 29 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -113,3 +113,32 @@ fn invalid_ptr<T>(addr: usize) -> *mut T {
113113
debug_assert_eq!(ptr as usize, addr);
114114
ptr.cast::<T>()
115115
}
116+
117+
#[test]
118+
fn test_bytes_into_vec() {
119+
let vec = vec![33u8; 1024];
120+
121+
// Test cases where kind == KIND_VEC
122+
let b1 = Bytes::from(vec.clone());
123+
assert_eq!(Vec::from(b1), vec);
124+
125+
// Test cases where kind == KIND_ARC, ref_cnt == 1
126+
let b1 = Bytes::from(vec.clone());
127+
drop(b1.clone());
128+
assert_eq!(Vec::from(b1), vec);
129+
130+
// Test cases where kind == KIND_ARC, ref_cnt == 2
131+
let b1 = Bytes::from(vec.clone());
132+
let b2 = b1.clone();
133+
assert_eq!(Vec::from(b1), vec);
134+
135+
// Test cases where vtable = SHARED_VTABLE, kind == KIND_ARC, ref_cnt == 1
136+
assert_eq!(Vec::from(b2), vec);
137+
138+
// Test cases where offset != 0
139+
let mut b1 = Bytes::from(vec.clone());
140+
let b2 = b1.split_off(20);
141+
142+
assert_eq!(Vec::from(b2), vec[20..]);
143+
assert_eq!(Vec::from(b1), vec[..20]);
144+
}

0 commit comments

Comments (0)