Skip to content

Commit cd188cb

Browse files
NobodyXu and Darksonn authored
Add conversion from Bytes to Vec<u8> (#547)
Signed-off-by: Jiahao XU <Jiahao_XU@outlook.com> Co-authored-by: Alice Ryhl <aliceryhl@google.com>
1 parent 10d1f6e commit cd188cb

File tree

5 files changed

+240
-0
lines changed

5 files changed

+240
-0
lines changed

src/bytes.rs

Lines changed: 89 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -109,6 +109,10 @@ pub(crate) struct Vtable {
109109
/// fn(data, ptr, len)
110110
pub clone: unsafe fn(&AtomicPtr<()>, *const u8, usize) -> Bytes,
111111
/// fn(data, ptr, len)
112+
///
113+
/// takes `Bytes` to value
114+
pub to_vec: unsafe fn(&AtomicPtr<()>, *const u8, usize) -> Vec<u8>,
115+
/// fn(data, ptr, len)
112116
pub drop: unsafe fn(&mut AtomicPtr<()>, *const u8, usize),
113117
}
114118

@@ -845,6 +849,13 @@ impl From<String> for Bytes {
845849
}
846850
}
847851

852+
impl From<Bytes> for Vec<u8> {
853+
fn from(bytes: Bytes) -> Vec<u8> {
854+
let bytes = mem::ManuallyDrop::new(bytes);
855+
unsafe { (bytes.vtable.to_vec)(&bytes.data, bytes.ptr, bytes.len) }
856+
}
857+
}
858+
848859
// ===== impl Vtable =====
849860

850861
impl fmt::Debug for Vtable {
@@ -860,6 +871,7 @@ impl fmt::Debug for Vtable {
860871

861872
const STATIC_VTABLE: Vtable = Vtable {
862873
clone: static_clone,
874+
to_vec: static_to_vec,
863875
drop: static_drop,
864876
};
865877

@@ -868,6 +880,11 @@ unsafe fn static_clone(_: &AtomicPtr<()>, ptr: *const u8, len: usize) -> Bytes {
868880
Bytes::from_static(slice)
869881
}
870882

883+
unsafe fn static_to_vec(_: &AtomicPtr<()>, ptr: *const u8, len: usize) -> Vec<u8> {
884+
let slice = slice::from_raw_parts(ptr, len);
885+
slice.to_vec()
886+
}
887+
871888
unsafe fn static_drop(_: &mut AtomicPtr<()>, _: *const u8, _: usize) {
872889
// nothing to drop for &'static [u8]
873890
}
@@ -876,11 +893,13 @@ unsafe fn static_drop(_: &mut AtomicPtr<()>, _: *const u8, _: usize) {
876893

877894
static PROMOTABLE_EVEN_VTABLE: Vtable = Vtable {
878895
clone: promotable_even_clone,
896+
to_vec: promotable_even_to_vec,
879897
drop: promotable_even_drop,
880898
};
881899

882900
static PROMOTABLE_ODD_VTABLE: Vtable = Vtable {
883901
clone: promotable_odd_clone,
902+
to_vec: promotable_odd_to_vec,
884903
drop: promotable_odd_drop,
885904
};
886905

@@ -897,6 +916,38 @@ unsafe fn promotable_even_clone(data: &AtomicPtr<()>, ptr: *const u8, len: usize
897916
}
898917
}
899918

919+
unsafe fn promotable_to_vec(
920+
data: &AtomicPtr<()>,
921+
ptr: *const u8,
922+
len: usize,
923+
f: fn(*mut ()) -> *mut u8,
924+
) -> Vec<u8> {
925+
let shared = data.load(Ordering::Acquire);
926+
let kind = shared as usize & KIND_MASK;
927+
928+
if kind == KIND_ARC {
929+
shared_to_vec_impl(shared.cast(), ptr, len)
930+
} else {
931+
// If Bytes holds a Vec, then the offset must be 0.
932+
debug_assert_eq!(kind, KIND_VEC);
933+
934+
let buf = f(shared);
935+
936+
let cap = (ptr as usize - buf as usize) + len;
937+
938+
// Copy back buffer
939+
ptr::copy(ptr, buf, len);
940+
941+
Vec::from_raw_parts(buf, len, cap)
942+
}
943+
}
944+
945+
unsafe fn promotable_even_to_vec(data: &AtomicPtr<()>, ptr: *const u8, len: usize) -> Vec<u8> {
946+
promotable_to_vec(data, ptr, len, |shared| {
947+
ptr_map(shared.cast(), |addr| addr & !KIND_MASK)
948+
})
949+
}
950+
900951
unsafe fn promotable_even_drop(data: &mut AtomicPtr<()>, ptr: *const u8, len: usize) {
901952
data.with_mut(|shared| {
902953
let shared = *shared;
@@ -924,6 +975,10 @@ unsafe fn promotable_odd_clone(data: &AtomicPtr<()>, ptr: *const u8, len: usize)
924975
}
925976
}
926977

978+
unsafe fn promotable_odd_to_vec(data: &AtomicPtr<()>, ptr: *const u8, len: usize) -> Vec<u8> {
979+
promotable_to_vec(data, ptr, len, |shared| shared.cast())
980+
}
981+
927982
unsafe fn promotable_odd_drop(data: &mut AtomicPtr<()>, ptr: *const u8, len: usize) {
928983
data.with_mut(|shared| {
929984
let shared = *shared;
@@ -967,6 +1022,7 @@ const _: [(); 0 - mem::align_of::<Shared>() % 2] = []; // Assert that the alignm
9671022

9681023
static SHARED_VTABLE: Vtable = Vtable {
9691024
clone: shared_clone,
1025+
to_vec: shared_to_vec,
9701026
drop: shared_drop,
9711027
};
9721028

@@ -979,6 +1035,39 @@ unsafe fn shared_clone(data: &AtomicPtr<()>, ptr: *const u8, len: usize) -> Byte
9791035
shallow_clone_arc(shared as _, ptr, len)
9801036
}
9811037

1038+
unsafe fn shared_to_vec_impl(shared: *mut Shared, ptr: *const u8, len: usize) -> Vec<u8> {
1039+
// Check that the ref_cnt is 1 (unique).
1040+
//
1041+
// If it is unique, then it is set to 0 with AcqRel fence for the same
1042+
// reason in release_shared.
1043+
//
1044+
// Otherwise, we take the other branch and call release_shared.
1045+
if (*shared)
1046+
.ref_cnt
1047+
.compare_exchange(1, 0, Ordering::AcqRel, Ordering::Relaxed)
1048+
.is_ok()
1049+
{
1050+
let buf = (*shared).buf;
1051+
let cap = (*shared).cap;
1052+
1053+
// Deallocate Shared
1054+
drop(Box::from_raw(shared as *mut mem::ManuallyDrop<Shared>));
1055+
1056+
// Copy back buffer
1057+
ptr::copy(ptr, buf, len);
1058+
1059+
Vec::from_raw_parts(buf, len, cap)
1060+
} else {
1061+
let v = slice::from_raw_parts(ptr, len).to_vec();
1062+
release_shared(shared);
1063+
v
1064+
}
1065+
}
1066+
1067+
unsafe fn shared_to_vec(data: &AtomicPtr<()>, ptr: *const u8, len: usize) -> Vec<u8> {
1068+
shared_to_vec_impl(data.load(Ordering::Relaxed).cast(), ptr, len)
1069+
}
1070+
9821071
unsafe fn shared_drop(data: &mut AtomicPtr<()>, _ptr: *const u8, _len: usize) {
9831072
data.with_mut(|shared| {
9841073
release_shared(shared.cast());

src/bytes_mut.rs

Lines changed: 23 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1611,6 +1611,7 @@ unsafe fn rebuild_vec(ptr: *mut u8, mut len: usize, mut cap: usize, off: usize)
16111611

16121612
static SHARED_VTABLE: Vtable = Vtable {
16131613
clone: shared_v_clone,
1614+
to_vec: shared_v_to_vec,
16141615
drop: shared_v_drop,
16151616
};
16161617

@@ -1622,6 +1623,28 @@ unsafe fn shared_v_clone(data: &AtomicPtr<()>, ptr: *const u8, len: usize) -> By
16221623
Bytes::with_vtable(ptr, len, data, &SHARED_VTABLE)
16231624
}
16241625

1626+
unsafe fn shared_v_to_vec(data: &AtomicPtr<()>, ptr: *const u8, len: usize) -> Vec<u8> {
1627+
let shared: *mut Shared = data.load(Ordering::Relaxed).cast();
1628+
1629+
if (*shared).is_unique() {
1630+
let shared = &mut *shared;
1631+
1632+
// Drop shared
1633+
let mut vec = mem::replace(&mut shared.vec, Vec::new());
1634+
release_shared(shared);
1635+
1636+
// Copy back buffer
1637+
ptr::copy(ptr, vec.as_mut_ptr(), len);
1638+
vec.set_len(len);
1639+
1640+
vec
1641+
} else {
1642+
let v = slice::from_raw_parts(ptr, len).to_vec();
1643+
release_shared(shared);
1644+
v
1645+
}
1646+
}
1647+
16251648
unsafe fn shared_v_drop(data: &mut AtomicPtr<()>, _ptr: *const u8, _len: usize) {
16261649
data.with_mut(|shared| {
16271650
release_shared(*shared as *mut Shared);

tests/test_bytes.rs

Lines changed: 70 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1065,3 +1065,73 @@ fn bytes_into_vec() {
10651065
let vec: Vec<u8> = bytes.into();
10661066
assert_eq!(&vec, prefix);
10671067
}
1068+
1069+
#[test]
1070+
fn test_bytes_into_vec() {
1071+
// Test STATIC_VTABLE.to_vec
1072+
let bs = b"1b23exfcz3r";
1073+
let vec: Vec<u8> = Bytes::from_static(bs).into();
1074+
assert_eq!(&*vec, bs);
1075+
1076+
// Test bytes_mut.SHARED_VTABLE.to_vec impl
1077+
eprintln!("1");
1078+
let mut bytes_mut: BytesMut = bs[..].into();
1079+
1080+
// Set kind to KIND_ARC so that after freeze, Bytes will use bytes_mut.SHARED_VTABLE
1081+
eprintln!("2");
1082+
drop(bytes_mut.split_off(bs.len()));
1083+
1084+
eprintln!("3");
1085+
let b1 = bytes_mut.freeze();
1086+
eprintln!("4");
1087+
let b2 = b1.clone();
1088+
1089+
eprintln!("{:#?}", (&*b1).as_ptr());
1090+
1091+
// shared.is_unique() = False
1092+
eprintln!("5");
1093+
assert_eq!(&*Vec::from(b2), bs);
1094+
1095+
// shared.is_unique() = True
1096+
eprintln!("6");
1097+
assert_eq!(&*Vec::from(b1), bs);
1098+
1099+
// Test bytes_mut.SHARED_VTABLE.to_vec impl where offset != 0
1100+
let mut bytes_mut1: BytesMut = bs[..].into();
1101+
let bytes_mut2 = bytes_mut1.split_off(9);
1102+
1103+
let b1 = bytes_mut1.freeze();
1104+
let b2 = bytes_mut2.freeze();
1105+
1106+
assert_eq!(Vec::from(b2), bs[9..]);
1107+
assert_eq!(Vec::from(b1), bs[..9]);
1108+
}
1109+
1110+
#[test]
1111+
fn test_bytes_into_vec_promotable_even() {
1112+
let vec = vec![33u8; 1024];
1113+
1114+
// Test cases where kind == KIND_VEC
1115+
let b1 = Bytes::from(vec.clone());
1116+
assert_eq!(Vec::from(b1), vec);
1117+
1118+
// Test cases where kind == KIND_ARC, ref_cnt == 1
1119+
let b1 = Bytes::from(vec.clone());
1120+
drop(b1.clone());
1121+
assert_eq!(Vec::from(b1), vec);
1122+
1123+
// Test cases where kind == KIND_ARC, ref_cnt == 2
1124+
let b1 = Bytes::from(vec.clone());
1125+
let b2 = b1.clone();
1126+
assert_eq!(Vec::from(b1), vec);
1127+
1128+
// Test cases where vtable = SHARED_VTABLE, kind == KIND_ARC, ref_cnt == 1
1129+
assert_eq!(Vec::from(b2), vec);
1130+
1131+
// Test cases where offset != 0
1132+
let mut b1 = Bytes::from(vec.clone());
1133+
let b2 = b1.split_off(20);
1134+
1135+
assert_eq!(Vec::from(b2), vec[20..]);
1136+
assert_eq!(Vec::from(b1), vec[..20]);
1137+
}

tests/test_bytes_odd_alloc.rs

Lines changed: 29 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -66,3 +66,32 @@ fn test_bytes_clone_drop() {
6666
let b1 = Bytes::from(vec);
6767
let _b2 = b1.clone();
6868
}
69+
70+
#[test]
71+
fn test_bytes_into_vec() {
72+
let vec = vec![33u8; 1024];
73+
74+
// Test cases where kind == KIND_VEC
75+
let b1 = Bytes::from(vec.clone());
76+
assert_eq!(Vec::from(b1), vec);
77+
78+
// Test cases where kind == KIND_ARC, ref_cnt == 1
79+
let b1 = Bytes::from(vec.clone());
80+
drop(b1.clone());
81+
assert_eq!(Vec::from(b1), vec);
82+
83+
// Test cases where kind == KIND_ARC, ref_cnt == 2
84+
let b1 = Bytes::from(vec.clone());
85+
let b2 = b1.clone();
86+
assert_eq!(Vec::from(b1), vec);
87+
88+
// Test cases where vtable = SHARED_VTABLE, kind == KIND_ARC, ref_cnt == 1
89+
assert_eq!(Vec::from(b2), vec);
90+
91+
// Test cases where offset != 0
92+
let mut b1 = Bytes::from(vec.clone());
93+
let b2 = b1.split_off(20);
94+
95+
assert_eq!(Vec::from(b2), vec[20..]);
96+
assert_eq!(Vec::from(b1), vec[..20]);
97+
}

tests/test_bytes_vec_alloc.rs

Lines changed: 29 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -112,3 +112,32 @@ fn invalid_ptr<T>(addr: usize) -> *mut T {
112112
debug_assert_eq!(ptr as usize, addr);
113113
ptr.cast::<T>()
114114
}
115+
116+
#[test]
117+
fn test_bytes_into_vec() {
118+
let vec = vec![33u8; 1024];
119+
120+
// Test cases where kind == KIND_VEC
121+
let b1 = Bytes::from(vec.clone());
122+
assert_eq!(Vec::from(b1), vec);
123+
124+
// Test cases where kind == KIND_ARC, ref_cnt == 1
125+
let b1 = Bytes::from(vec.clone());
126+
drop(b1.clone());
127+
assert_eq!(Vec::from(b1), vec);
128+
129+
// Test cases where kind == KIND_ARC, ref_cnt == 2
130+
let b1 = Bytes::from(vec.clone());
131+
let b2 = b1.clone();
132+
assert_eq!(Vec::from(b1), vec);
133+
134+
// Test cases where vtable = SHARED_VTABLE, kind == KIND_ARC, ref_cnt == 1
135+
assert_eq!(Vec::from(b2), vec);
136+
137+
// Test cases where offset != 0
138+
let mut b1 = Bytes::from(vec.clone());
139+
let b2 = b1.split_off(20);
140+
141+
assert_eq!(Vec::from(b2), vec[20..]);
142+
assert_eq!(Vec::from(b1), vec[..20]);
143+
}

0 commit comments

Comments (0)