@@ -1,3 +1,4 @@
+use core::any::TypeId;
 use core::iter::FromIterator;
 use core::ops::{Deref, RangeBounds};
 use core::{cmp, fmt, hash, mem, ptr, slice, usize};
@@ -114,6 +115,9 @@ pub unsafe trait BytesImpl: 'static {
     /// Decompose `Self` into parts used by `Bytes`.
     fn into_bytes_parts(this: Self) -> (AtomicPtr<()>, *const u8, usize);

+    /// Creates itself directly from the raw bytes parts decomposed with `into_bytes_parts`.
+    unsafe fn from_bytes_parts(data: &mut AtomicPtr<()>, ptr: *const u8, len: usize) -> Self;
+
     /// Returns new `Bytes` based on the current parts.
     unsafe fn clone(data: &AtomicPtr<()>, ptr: *const u8, len: usize) -> Bytes;
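
The new `from_bytes_parts` is the inverse of `into_bytes_parts`: it may only be fed parts that were produced by `into_bytes_parts` of the same implementation type, which is exactly what the `TypeId` check in `downcast_impl` further down guarantees. A minimal sketch of the intended round trip, for some concrete `T: BytesImpl` (the helper itself is illustrative and not part of this change):

    // Illustrative only: reassembling a `T` from the parts it was just
    // decomposed into. Every implementation of `from_bytes_parts` is
    // expected to uphold this invariant.
    unsafe fn round_trip<T: BytesImpl>(this: T) -> T {
        let (mut data, ptr, len) = T::into_bytes_parts(this);
        // Sound only because `data`, `ptr` and `len` came from
        // `T::into_bytes_parts` of this same `T`.
        T::from_bytes_parts(&mut data, ptr, len)
    }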
@@ -132,6 +136,7 @@ pub unsafe trait BytesImpl: 'static {
 }

 struct Vtable {
+    type_id: fn() -> TypeId,
     /// fn(data, ptr, len)
     clone: unsafe fn(&AtomicPtr<()>, *const u8, usize) -> Bytes,
     /// fn(data, ptr, len)
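
Storing the tag as a `fn() -> TypeId` rather than a `TypeId` value keeps the vtable constructible in `const` contexts such as the `STATIC_VTABLE` in `from_static` below: on stable Rust, `TypeId::of::<T>()` cannot be *called* in a `const` initializer, but the un-called function pointer `TypeId::of::<T>` is a plain constant, and `downcast_impl` simply invokes it at runtime. A self-contained sketch of the pattern (the `Table` type and names here are made up for illustration):

    use core::any::TypeId;

    struct Table {
        // A function that *produces* the id, not the id itself, so the
        // table stays constructible in `const` contexts.
        type_id: fn() -> TypeId,
    }

    const U8_TABLE: Table = Table {
        type_id: TypeId::of::<u8>, // fn pointer; no call happens here
    };

    fn is<T: 'static>(table: &Table) -> bool {
        TypeId::of::<T>() == (table.type_id)()
    }

    fn main() {
        assert!(is::<u8>(&U8_TABLE));
        assert!(!is::<u16>(&U8_TABLE));
    }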
@@ -192,6 +197,7 @@ impl Bytes {
     #[cfg(not(all(loom, test)))]
     pub const fn from_static(bytes: &'static [u8]) -> Bytes {
         const STATIC_VTABLE: Vtable = Vtable {
+            type_id: TypeId::of::<StaticImpl>,
             clone: <StaticImpl as BytesImpl>::clone,
             will_truncate: <StaticImpl as BytesImpl>::will_truncate,
             into_vec: <StaticImpl as BytesImpl>::into_vec,
@@ -209,6 +215,7 @@ impl Bytes {
     #[cfg(all(loom, test))]
     pub fn from_static(bytes: &'static [u8]) -> Bytes {
         const STATIC_VTABLE: Vtable = Vtable {
+            type_id: TypeId::of::<StaticImpl>,
             clone: <StaticImpl as BytesImpl>::clone,
             will_truncate: <StaticImpl as BytesImpl>::will_truncate,
             into_vec: <StaticImpl as BytesImpl>::into_vec,
@@ -235,6 +242,7 @@ impl Bytes {
             len,
             data,
             vtable: &Vtable {
+                type_id: TypeId::of::<T>,
                 clone: T::clone,
                 will_truncate: T::will_truncate,
                 into_vec: T::into_vec,
@@ -543,6 +551,19 @@ impl Bytes {
         self.truncate(0);
     }

+    /// Downcast this `Bytes` into its underlying implementation.
+    #[inline]
+    pub fn downcast_impl<T: BytesImpl>(self) -> Result<T, Bytes> {
+        if TypeId::of::<T>() == (self.vtable.type_id)() {
+            Ok(unsafe {
+                let this = &mut *mem::ManuallyDrop::new(self);
+                T::from_bytes_parts(&mut this.data, this.ptr, this.len)
+            })
+        } else {
+            Err(self)
+        }
+    }
+
     // private

     #[inline]
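
A hypothetical usage sketch, assuming a user-defined `MyImpl` that implements `BytesImpl` (no such public implementation exists in this diff): on a `TypeId` match the downcast is cheap and consumes the `Bytes`, and on a mismatch the caller gets the original `Bytes` back intact rather than losing it.

    // `MyImpl: BytesImpl` is assumed to exist for this example.
    fn reclaim(bytes: Bytes) -> Option<MyImpl> {
        match bytes.downcast_impl::<MyImpl>() {
            // `bytes` really was backed by a `MyImpl`: we own it again.
            Ok(imp) => Some(imp),
            // Some other backing implementation: `bytes` comes back untouched.
            Err(_bytes) => None,
        }
    }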
@@ -891,6 +912,7 @@ impl From<Bytes> for Vec<u8> {
 impl fmt::Debug for Vtable {
     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
         f.debug_struct("Vtable")
+            .field("type_id", &self.type_id)
             .field("clone", &(self.clone as *const ()))
             .field("will_truncate", &(self.will_truncate as *const ()))
             .field("into_vec", &(self.into_vec as *const ()))
@@ -909,6 +931,10 @@ unsafe impl BytesImpl for StaticImpl {
         (mem::take(&mut bytes.data), bytes.ptr, bytes.len)
     }

+    unsafe fn from_bytes_parts(_data: &mut AtomicPtr<()>, ptr: *const u8, len: usize) -> Self {
+        StaticImpl(slice::from_raw_parts(ptr, len))
+    }
+
     unsafe fn clone(_: &AtomicPtr<()>, ptr: *const u8, len: usize) -> Bytes {
         let slice = slice::from_raw_parts(ptr, len);
         Bytes::from_static(slice)
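
Since the static representation carries no state in `data` (the slice is fully described by `ptr` and `len`), `_data` is ignored and reconstruction is a bare `slice::from_raw_parts`. A crate-internal test sketch of the full loop through `downcast_impl` (the test name is made up, and `StaticImpl` is private, so this only compiles inside the crate):

    #[test]
    fn static_downcast_round_trip() {
        let b = Bytes::from_static(b"hello");
        // `from_static` installs `StaticImpl`'s vtable, so the TypeId
        // check passes and `from_bytes_parts` rebuilds the slice.
        let imp = b.downcast_impl::<StaticImpl>().unwrap();
        assert_eq!(imp.0, &b"hello"[..]);
    }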
@@ -932,7 +958,6 @@ struct PromotableOddImpl(Promotable);

 enum Promotable {
     Owned(Box<[u8]>),
-    #[allow(dead_code)]
     Shared(SharedImpl),
 }
@@ -952,6 +977,12 @@ unsafe impl BytesImpl for PromotableEvenImpl {
         (AtomicPtr::new(data.cast()), ptr, len)
     }

+    unsafe fn from_bytes_parts(data: &mut AtomicPtr<()>, ptr: *const u8, len: usize) -> Self {
+        PromotableEvenImpl(promotable_from_bytes_parts(data, ptr, len, |shared| {
+            ptr_map(shared.cast(), |addr| addr & !KIND_MASK)
+        }))
+    }
+
     unsafe fn clone(data: &AtomicPtr<()>, ptr: *const u8, len: usize) -> Bytes {
         let shared = data.load(Ordering::Acquire);
         let kind = shared as usize & KIND_MASK;
@@ -994,6 +1025,30 @@ unsafe impl BytesImpl for PromotableEvenImpl {
     }
 }

+unsafe fn promotable_from_bytes_parts(
+    data: &mut AtomicPtr<()>,
+    ptr: *const u8,
+    len: usize,
+    f: fn(*mut ()) -> *mut u8,
+) -> Promotable {
+    let shared = *data.get_mut();
+    let kind = shared as usize & KIND_MASK;
+
+    if kind == KIND_ARC {
+        Promotable::Shared(SharedImpl::from_bytes_parts(data, ptr, len))
+    } else {
+        debug_assert_eq!(kind, KIND_VEC);
+
+        let buf = f(shared);
+
+        let cap = (ptr as usize - buf as usize) + len;
+
+        let vec = Vec::from_raw_parts(buf, cap, cap);
+
+        Promotable::Owned(vec.into_boxed_slice())
+    }
+}
+
 unsafe fn promotable_into_vec(
     data: &mut AtomicPtr<()>,
     ptr: *const u8,
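
The promotable representations tag the kind into the data pointer itself (`KIND_MASK` selects the low bit), so the closures passed in by `PromotableEvenImpl` above and `PromotableOddImpl` below undo that tagging: the even variant masks the bit off, while the odd variant's pointer is already the allocation start. The capacity is then recomputed as `(ptr - buf) + len`, i.e. the view's offset into the buffer plus its remaining length. A small self-contained sketch of that arithmetic, with made-up addresses and the `KIND_*` constants mirrored locally:

    const KIND_VEC: usize = 0b1;
    const KIND_MASK: usize = 0b1;

    fn main() {
        let buf = 0x1000_usize;      // start of the original allocation
        let tagged = buf | KIND_VEC; // kind stored in the low pointer bit

        // What the "even" closure recovers via `addr & !KIND_MASK`:
        let untagged = tagged & !KIND_MASK;
        assert_eq!(untagged, buf);

        // Capacity recovery as in `promotable_from_bytes_parts`: the view
        // was advanced 4 bytes past the start and still holds 8 bytes.
        let ptr = buf + 4;
        let len = 8;
        let cap = (ptr - untagged) + len;
        assert_eq!(cap, 12); // offset (4) + remaining length (8)
    }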
@@ -1034,6 +1089,12 @@ unsafe impl BytesImpl for PromotableOddImpl {
         (AtomicPtr::new(ptr.cast()), ptr, len)
     }

+    unsafe fn from_bytes_parts(data: &mut AtomicPtr<()>, ptr: *const u8, len: usize) -> Self {
+        PromotableOddImpl(promotable_from_bytes_parts(data, ptr, len, |shared| {
+            shared.cast()
+        }))
+    }
+
     unsafe fn clone(data: &AtomicPtr<()>, ptr: *const u8, len: usize) -> Bytes {
         let shared = data.load(Ordering::Acquire);
         let kind = shared as usize & KIND_MASK;
@@ -1114,6 +1175,14 @@ unsafe impl BytesImpl for SharedImpl {
         (AtomicPtr::new(this.shared.cast()), this.offset, this.len)
     }

+    unsafe fn from_bytes_parts(data: &mut AtomicPtr<()>, ptr: *const u8, len: usize) -> Self {
+        SharedImpl {
+            shared: (*data.get_mut()).cast(),
+            offset: ptr,
+            len,
+        }
+    }
+
     unsafe fn clone(data: &AtomicPtr<()>, ptr: *const u8, len: usize) -> Bytes {
         let shared = data.load(Ordering::Relaxed);
         shallow_clone_arc(shared as _, ptr, len)