@@ -1,3 +1,4 @@
+use core::any::TypeId;
 use core::iter::FromIterator;
 use core::ops::{Deref, RangeBounds};
 use core::{cmp, fmt, hash, mem, ptr, slice, usize};
@@ -114,6 +115,9 @@ pub unsafe trait BytesImpl: 'static {
     /// Decompose `Self` into parts used by `Bytes`.
     fn into_bytes_parts(this: Self) -> (AtomicPtr<()>, *const u8, usize);
 
+    /// Creates `Self` directly from the raw parts previously decomposed with `into_bytes_parts`.
+    unsafe fn from_bytes_parts(data: &mut AtomicPtr<()>, ptr: *const u8, len: usize) -> Self;
+
     /// Returns new `Bytes` based on the current parts.
     unsafe fn clone(data: &AtomicPtr<()>, ptr: *const u8, len: usize) -> Bytes;
 
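The new `from_bytes_parts` is the inverse of `into_bytes_parts`: given the `(data, ptr, len)` triple the latter produced, it must rebuild the original implementation. Below is a minimal, self-contained sketch of that round-trip contract, using `std`'s `AtomicPtr` in place of the crate's loom-aware alias; the `RawParts`/`Owned` names are made up for the example and are not part of the crate.

```rust
use std::sync::atomic::AtomicPtr;

// Simplified stand-in for BytesImpl's decompose/reconstruct pair.
trait RawParts: Sized {
    fn into_parts(this: Self) -> (AtomicPtr<()>, *const u8, usize);
    unsafe fn from_parts(data: &mut AtomicPtr<()>, ptr: *const u8, len: usize) -> Self;
}

struct Owned(Box<[u8]>);

impl RawParts for Owned {
    fn into_parts(this: Self) -> (AtomicPtr<()>, *const u8, usize) {
        let raw = Box::into_raw(this.0); // ownership moves into the raw parts
        let (ptr, len) = (raw as *const u8, unsafe { (*raw).len() });
        (AtomicPtr::new(raw.cast()), ptr, len)
    }

    unsafe fn from_parts(data: &mut AtomicPtr<()>, _ptr: *const u8, len: usize) -> Self {
        // Rebuild the Box from the pointer stashed in `data`.
        let raw = *data.get_mut();
        Owned(Box::from_raw(std::ptr::slice_from_raw_parts_mut(
            raw.cast::<u8>(),
            len,
        )))
    }
}

fn main() {
    let (mut data, ptr, len) = RawParts::into_parts(Owned(vec![1u8, 2, 3].into_boxed_slice()));
    let back = unsafe { Owned::from_parts(&mut data, ptr, len) };
    assert_eq!(&*back.0, &[1, 2, 3]);
}
```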
@@ -132,6 +136,7 @@ pub unsafe trait BytesImpl: 'static {
 }
 
 struct Vtable {
+    type_id: fn() -> TypeId,
     /// fn(data, ptr, len)
     clone: unsafe fn(&AtomicPtr<()>, *const u8, usize) -> Bytes,
     /// fn(data, ptr, len)
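Note the field is `fn() -> TypeId`, not a stored `TypeId` value. `TypeId::of` is not a `const fn` on stable Rust, so a `TypeId` value could not appear in the `const STATIC_VTABLE` that `from_static` needs; a function pointer can, deferring the comparison to `downcast_impl` call time. A compilable sketch of just that trick (names are illustrative):

```rust
use std::any::TypeId;

// A vtable that records its concrete type via a fn pointer, so the
// vtable itself stays const-constructible.
struct Vtable {
    type_id: fn() -> TypeId,
}

struct StaticImpl;

// The generic fn item TypeId::of::<StaticImpl> coerces to a fn pointer
// in a const initializer, even though calling it here would not compile.
const STATIC_VTABLE: Vtable = Vtable {
    type_id: TypeId::of::<StaticImpl>,
};

fn main() {
    assert!((STATIC_VTABLE.type_id)() == TypeId::of::<StaticImpl>());
}
```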
@@ -192,6 +197,7 @@ impl Bytes {
     #[cfg(not(all(loom, test)))]
     pub const fn from_static(bytes: &'static [u8]) -> Bytes {
         const STATIC_VTABLE: Vtable = Vtable {
+            type_id: TypeId::of::<StaticImpl>,
             clone: <StaticImpl as BytesImpl>::clone,
             will_truncate: <StaticImpl as BytesImpl>::will_truncate,
             into_vec: <StaticImpl as BytesImpl>::into_vec,
@@ -209,6 +215,7 @@ impl Bytes {
     #[cfg(all(loom, test))]
     pub fn from_static(bytes: &'static [u8]) -> Bytes {
         const STATIC_VTABLE: Vtable = Vtable {
+            type_id: TypeId::of::<StaticImpl>,
             clone: <StaticImpl as BytesImpl>::clone,
             will_truncate: <StaticImpl as BytesImpl>::will_truncate,
             into_vec: <StaticImpl as BytesImpl>::into_vec,
@@ -235,6 +242,7 @@ impl Bytes {
             len,
             data,
             vtable: &Vtable {
+                type_id: TypeId::of::<T>,
                 clone: T::clone,
                 will_truncate: T::will_truncate,
                 into_vec: T::into_vec,
@@ -543,6 +551,19 @@ impl Bytes {
         self.truncate(0);
     }
 
+    /// Downcasts this `Bytes` into its underlying implementation.
+    #[inline]
+    pub fn downcast_impl<T: BytesImpl>(self) -> Result<T, Bytes> {
+        if TypeId::of::<T>() == (self.vtable.type_id)() {
+            Ok(unsafe {
+                let this = &mut *mem::ManuallyDrop::new(self);
+                T::from_bytes_parts(&mut this.data, this.ptr, this.len)
+            })
+        } else {
+            Err(self)
+        }
+    }
+
     // private
 
     #[inline]
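Inside the `Ok` branch, `ManuallyDrop` lets the raw parts move out of `self` without running `Bytes`'s destructor, which would otherwise release the very buffer `T` is being rebuilt around. A small sketch of the same move-out-of-a-`Drop`-type pattern (the `Loud`/`into_inner` names are made up for the example):

```rust
use std::mem::{self, ManuallyDrop};

struct Loud(String);

impl Drop for Loud {
    fn drop(&mut self) {
        println!("dropping {}", self.0);
    }
}

// Move a field out of a Drop type without running its destructor:
// wrap the value in ManuallyDrop, then take the field through it.
fn into_inner(this: Loud) -> String {
    let mut this = ManuallyDrop::new(this);
    mem::take(&mut this.0)
}

fn main() {
    let s = into_inner(Loud("payload".into()));
    assert_eq!(s, "payload"); // and no "dropping ..." line was printed
}
```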
@@ -891,6 +912,7 @@ impl From<Bytes> for Vec<u8> {
 impl fmt::Debug for Vtable {
     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
         f.debug_struct("Vtable")
+            .field("type_id", &self.type_id)
             .field("clone", &(self.clone as *const ()))
             .field("will_truncate", &(self.will_truncate as *const ()))
             .field("into_vec", &(self.into_vec as *const ()))
@@ -906,7 +928,15 @@ struct StaticImpl(&'static [u8]);
 unsafe impl BytesImpl for StaticImpl {
     fn into_bytes_parts(this: Self) -> (AtomicPtr<()>, *const u8, usize) {
         let mut bytes = mem::ManuallyDrop::new(Bytes::from_static(this.0));
-        (mem::take(&mut bytes.data), bytes.ptr, bytes.len)
+        (
+            mem::replace(&mut bytes.data, AtomicPtr::default()),
+            bytes.ptr,
+            bytes.len,
+        )
+    }
+
+    unsafe fn from_bytes_parts(_data: &mut AtomicPtr<()>, ptr: *const u8, len: usize) -> Self {
+        StaticImpl(slice::from_raw_parts(ptr, len))
     }
 
     unsafe fn clone(_: &AtomicPtr<()>, ptr: *const u8, len: usize) -> Bytes {
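`mem::take(x)` is exactly `mem::replace(x, Default::default())`, so this rewrite is behavior-preserving; spelling the default out avoids requiring a `Default` impl on the field's type (presumably relevant under loom, whose `AtomicPtr`, if I recall correctly, does not provide one). The equivalence, demonstrated with `std`'s types:

```rust
use std::mem;
use std::sync::atomic::AtomicPtr;

fn main() {
    let mut a: AtomicPtr<()> = AtomicPtr::new(0x1 as *mut ());

    // `take` swaps in Default::default() (a null AtomicPtr) and
    // returns the old value...
    let taken = mem::take(&mut a);
    assert_eq!(taken.into_inner(), 0x1 as *mut ());

    // ...which is exactly what the explicit `replace` spells out.
    let replaced = mem::replace(&mut a, AtomicPtr::default());
    assert!(replaced.into_inner().is_null());
}
```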
@@ -932,7 +962,6 @@ struct PromotableOddImpl(Promotable);
 
 enum Promotable {
     Owned(Box<[u8]>),
-    #[allow(dead_code)]
     Shared(SharedImpl),
 }
 
@@ -952,6 +981,12 @@ unsafe impl BytesImpl for PromotableEvenImpl {
         (AtomicPtr::new(data.cast()), ptr, len)
     }
 
+    unsafe fn from_bytes_parts(data: &mut AtomicPtr<()>, ptr: *const u8, len: usize) -> Self {
+        PromotableEvenImpl(promotable_from_bytes_parts(data, ptr, len, |shared| {
+            ptr_map(shared.cast(), |addr| addr & !KIND_MASK)
+        }))
+    }
+
     unsafe fn clone(data: &AtomicPtr<()>, ptr: *const u8, len: usize) -> Bytes {
         let shared = data.load(Ordering::Acquire);
         let kind = shared as usize & KIND_MASK;
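`clone` branches on the low bit of the stored pointer: the promotable impls keep either a tagged `Vec` buffer pointer or a promoted `Shared` arc in the same `AtomicPtr` slot. A sketch of the low-bit tagging idea (the constants mirror the crate's `KIND_*` values; the 8-alignment of `Box<u64>` is what makes the low bits free in this toy version):

```rust
const KIND_MASK: usize = 0b1;
const KIND_ARC: usize = 0b0;
const KIND_VEC: usize = 0b1;

fn main() {
    // An 8-aligned allocation has zero low bits, so bit 0 can hold a tag.
    let buf: Box<u64> = Box::new(0);
    let addr = Box::into_raw(buf) as usize;
    assert_eq!(addr & KIND_MASK, KIND_ARC); // untagged reads as "arc"

    // Tag it as a vec-backed buffer...
    let tagged = addr | KIND_VEC;
    assert_eq!(tagged & KIND_MASK, KIND_VEC);

    // ...and strip the tag to recover the allocation before freeing it.
    let ptr = (tagged & !KIND_MASK) as *mut u64;
    drop(unsafe { Box::from_raw(ptr) });
}
```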
@@ -994,6 +1029,30 @@ unsafe impl BytesImpl for PromotableEvenImpl {
     }
 }
 
+unsafe fn promotable_from_bytes_parts(
+    data: &mut AtomicPtr<()>,
+    ptr: *const u8,
+    len: usize,
+    f: fn(*mut ()) -> *mut u8,
+) -> Promotable {
+    let shared = data.with_mut(|p| *p);
+    let kind = shared as usize & KIND_MASK;
+
+    if kind == KIND_ARC {
+        Promotable::Shared(SharedImpl::from_bytes_parts(data, ptr, len))
+    } else {
+        debug_assert_eq!(kind, KIND_VEC);
+
+        let buf = f(shared);
+
+        let cap = (ptr as usize - buf as usize) + len;
+
+        let vec = Vec::from_raw_parts(buf, cap, cap);
+
+        Promotable::Owned(vec.into_boxed_slice())
+    }
+}
+
 unsafe fn promotable_into_vec(
     data: &mut AtomicPtr<()>,
     ptr: *const u8,
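The capacity arithmetic in the `KIND_VEC` branch accounts for earlier `advance()` calls: `ptr` may sit past the allocation start `buf`, so the original capacity is the consumed prefix plus the remaining `len`. And since `Vec::from_raw_parts` is called with `len == cap`, `into_boxed_slice` does not reallocate. The arithmetic, worked through with made-up numbers:

```rust
fn main() {
    // Suppose the original Vec spanned [buf, buf + cap) and the Bytes
    // view was advanced by 3: ptr = buf + 3, len = cap - 3.
    let buf = 0x1000usize; // hypothetical allocation start
    let cap = 10usize;
    let ptr = buf + 3;
    let len = cap - 3;

    // The reconstruction in promotable_from_bytes_parts:
    let recovered_cap = (ptr - buf) + len;
    assert_eq!(recovered_cap, cap);
}
```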
@@ -1034,6 +1093,12 @@ unsafe impl BytesImpl for PromotableOddImpl {
         (AtomicPtr::new(ptr.cast()), ptr, len)
     }
 
+    unsafe fn from_bytes_parts(data: &mut AtomicPtr<()>, ptr: *const u8, len: usize) -> Self {
+        PromotableOddImpl(promotable_from_bytes_parts(data, ptr, len, |shared| {
+            shared.cast()
+        }))
+    }
+
     unsafe fn clone(data: &AtomicPtr<()>, ptr: *const u8, len: usize) -> Bytes {
         let shared = data.load(Ordering::Acquire);
         let kind = shared as usize & KIND_MASK;
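The only difference from the even case is the closure: as I read the two impls, an even buffer pointer has its low bit free, so `KIND_VEC` is OR'd in when stored and must be masked back off on recovery, while an odd pointer's low bit is already 1 (indistinguishable from the tag), so it is stored untouched and a plain cast recovers it. In miniature:

```rust
const KIND_MASK: usize = 0b1;
const KIND_VEC: usize = 0b1;

fn main() {
    // Even pointer: the tag is OR'd in, and masked off to recover it.
    let even = 0x1000usize;
    let stored_even = even | KIND_VEC;
    assert_eq!(stored_even & !KIND_MASK, even);

    // Odd pointer: the low bit is already 1 (== KIND_VEC), so it is
    // stored as-is and recovered by a plain cast, no masking needed.
    let odd = 0x1001usize;
    assert_eq!(odd & KIND_MASK, KIND_VEC);
}
```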
@@ -1114,6 +1179,14 @@ unsafe impl BytesImpl for SharedImpl {
         (AtomicPtr::new(this.shared.cast()), this.offset, this.len)
     }
 
+    unsafe fn from_bytes_parts(data: &mut AtomicPtr<()>, ptr: *const u8, len: usize) -> Self {
+        SharedImpl {
+            shared: (data.with_mut(|p| *p)).cast(),
+            offset: ptr,
+            len,
+        }
+    }
+
     unsafe fn clone(data: &AtomicPtr<()>, ptr: *const u8, len: usize) -> Bytes {
         let shared = data.load(Ordering::Relaxed);
         shallow_clone_arc(shared as _, ptr, len)
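`SharedImpl::from_bytes_parts` repackages the arc pointer without touching the reference count, which is right because `downcast_impl` consumed the `Bytes` under `ManuallyDrop`: the count was neither incremented nor decremented, and ownership of the existing reference simply transfers to the returned impl. The same hand-off, expressed with `std::sync::Arc`:

```rust
use std::sync::Arc;

fn main() {
    // Analogous ownership transfer: into_raw gives up the reference
    // without touching the count; from_raw takes it back. No clone,
    // no extra increment, exactly one matching decrement at the end.
    let a = Arc::new(5u32);
    let raw = Arc::into_raw(Arc::clone(&a));
    assert_eq!(Arc::strong_count(&a), 2);
    let b = unsafe { Arc::from_raw(raw) };
    assert_eq!(Arc::strong_count(&a), 2);
    drop(b);
    assert_eq!(Arc::strong_count(&a), 1);
}
```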