@@ -110,6 +110,11 @@ pub(crate) struct Vtable {
     pub clone: unsafe fn(&AtomicPtr<()>, *const u8, usize) -> Bytes,
     /// fn(data, ptr, len)
     ///
+    /// Called before `Bytes::truncate` is processed.
+    /// Useful if the implementation needs a preparation step for it.
+    pub will_truncate: unsafe fn(&mut AtomicPtr<()>, *const u8, usize),
+    /// fn(data, ptr, len)
+    ///
     /// Consumes `Bytes` to return `Vec<u8>`
     pub into_vec: unsafe fn(&mut AtomicPtr<()>, *const u8, usize) -> Vec<u8>,
     /// fn(data, ptr, len)
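For context on the pattern being extended here: `Bytes` dispatches through a hand-rolled table of plain function pointers rather than a trait object, and each buffer representation supplies its own `static` table. A minimal sketch of that pattern, with hypothetical names rather than the crate's actual definitions:

```rust
use std::sync::atomic::AtomicPtr;

// Hypothetical, trimmed-down analogue of the crate's `Vtable`.
struct MiniVtable {
    // One plain fn pointer per operation; `will_truncate` mirrors the new field.
    will_truncate: unsafe fn(&mut AtomicPtr<()>, *const u8, usize),
}

unsafe fn noop_will_truncate(_: &mut AtomicPtr<()>, _: *const u8, _: usize) {
    // A repr that needs no preparation before truncation supplies a no-op.
}

// Each representation exposes one static table, like STATIC_VTABLE below.
static MINI_STATIC_VTABLE: MiniVtable = MiniVtable {
    will_truncate: noop_will_truncate,
};

fn main() {
    let mut data = AtomicPtr::new(std::ptr::null_mut());
    // Methods dispatch by calling through the table, as `truncate` does below.
    unsafe { (MINI_STATIC_VTABLE.will_truncate)(&mut data, std::ptr::null(), 0) };
}
```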
@@ -455,16 +460,10 @@ impl Bytes {
     #[inline]
     pub fn truncate(&mut self, len: usize) {
         if len < self.len {
-            // The Vec "promotable" vtables do not store the capacity,
-            // so we cannot truncate while using this repr. We *have* to
-            // promote using `split_off` so the capacity can be stored.
-            if self.vtable as *const Vtable == &PROMOTABLE_EVEN_VTABLE
-                || self.vtable as *const Vtable == &PROMOTABLE_ODD_VTABLE
-            {
-                drop(self.split_off(len));
-            } else {
-                self.len = len;
+            unsafe {
+                (self.vtable.will_truncate)(&mut self.data, self.ptr, self.len);
             }
+            self.len = len;
         }
     }
 
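The observable semantics of `truncate` are unchanged by this refactor; only the repr-specific preparation moves behind the vtable. For reference, the public behavior:

```rust
use bytes::Bytes;

fn main() {
    let mut b = Bytes::from(vec![1u8, 2, 3, 4, 5]);
    b.truncate(2); // keeps the first 2 bytes, no copy
    assert_eq!(&b[..], &[1, 2]);

    b.truncate(10); // len >= self.len: the guard above makes this a no-op
    assert_eq!(&b[..], &[1, 2]);
}
```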
@@ -862,6 +861,7 @@ impl fmt::Debug for Vtable {
     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
         f.debug_struct("Vtable")
             .field("clone", &(self.clone as *const ()))
+            .field("will_truncate", &(self.will_truncate as *const ()))
             .field("into_vec", &(self.into_vec as *const ()))
             .field("drop", &(self.drop as *const ()))
             .finish()
@@ -872,6 +872,7 @@ impl fmt::Debug for Vtable {
 
 const STATIC_VTABLE: Vtable = Vtable {
     clone: static_clone,
+    will_truncate: static_will_truncate,
    into_vec: static_into_vec,
     drop: static_drop,
 };
@@ -881,6 +882,10 @@ unsafe fn static_clone(_: &AtomicPtr<()>, ptr: *const u8, len: usize) -> Bytes {
     Bytes::from_static(slice)
 }
 
+unsafe fn static_will_truncate(_: &mut AtomicPtr<()>, _: *const u8, _: usize) {
+    // nothing to do before truncate for &'static [u8]
+}
+
 unsafe fn static_into_vec(_: &mut AtomicPtr<()>, ptr: *const u8, len: usize) -> Vec<u8> {
     let slice = slice::from_raw_parts(ptr, len);
     slice.to_vec()
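Concretely, a `Bytes` backed by a `&'static [u8]` needs no preparation: truncation only shrinks the view.

```rust
use bytes::Bytes;

fn main() {
    let mut b = Bytes::from_static(b"hello world");
    b.truncate(5); // static repr: will_truncate is a no-op, only `len` changes
    assert_eq!(&b[..], b"hello");
}
```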
@@ -894,12 +899,14 @@ unsafe fn static_drop(_: &mut AtomicPtr<()>, _: *const u8, _: usize) {
 
 static PROMOTABLE_EVEN_VTABLE: Vtable = Vtable {
     clone: promotable_even_clone,
+    will_truncate: promotable_even_will_truncate,
     into_vec: promotable_even_into_vec,
     drop: promotable_even_drop,
 };
 
 static PROMOTABLE_ODD_VTABLE: Vtable = Vtable {
     clone: promotable_odd_clone,
+    will_truncate: promotable_odd_will_truncate,
     into_vec: promotable_odd_into_vec,
     drop: promotable_odd_drop,
 };
@@ -917,6 +924,13 @@ unsafe fn promotable_even_clone(data: &AtomicPtr<()>, ptr: *const u8, len: usize
     }
 }
 
+unsafe fn promotable_even_will_truncate(data: &mut AtomicPtr<()>, ptr: *const u8, len: usize) {
+    // The Vec "promotable" vtables do not store the capacity,
+    // so we cannot truncate while using this repr. We *have* to
+    // promote using `clone` so the capacity can be stored.
+    drop(promotable_even_clone(&*data, ptr, len));
+}
+
 unsafe fn promotable_into_vec(
     data: &mut AtomicPtr<()>,
     ptr: *const u8,
@@ -980,6 +994,13 @@ unsafe fn promotable_odd_clone(data: &AtomicPtr<()>, ptr: *const u8, len: usize)
     }
 }
 
+unsafe fn promotable_odd_will_truncate(data: &mut AtomicPtr<()>, ptr: *const u8, len: usize) {
+    // The Vec "promotable" vtables do not store the capacity,
+    // so we cannot truncate while using this repr. We *have* to
+    // promote using `clone` so the capacity can be stored.
+    drop(promotable_odd_clone(&*data, ptr, len));
+}
+
 unsafe fn promotable_odd_into_vec(data: &mut AtomicPtr<()>, ptr: *const u8, len: usize) -> Vec<u8> {
     promotable_into_vec(data, ptr, len, |shared| shared.cast())
 }
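The constraint these comments describe is a general `Vec` invariant: reconstructing a `Vec` later (for reuse or freeing) requires the original capacity, not a shortened length. A std-only sketch of why, independent of the crate's internals:

```rust
fn main() {
    let mut v = Vec::with_capacity(32);
    v.extend_from_slice(b"hello world");
    let (ptr, len, cap) = (v.as_mut_ptr(), v.len(), v.capacity());
    std::mem::forget(v); // hold the raw allocation, as the promotable repr does

    // Rebuilding is only sound with the *original* capacity. A repr that
    // does not record `cap` separately cannot simply shrink `len` and still
    // rebuild the Vec correctly later; it must first promote to a repr that
    // stores the allocation, and only then truncate.
    let v = unsafe { Vec::from_raw_parts(ptr, len, cap) };
    assert_eq!(v.as_slice(), b"hello world");
}
```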
@@ -1027,6 +1048,7 @@ const _: [(); 0 - mem::align_of::<Shared>() % 2] = []; // Assert that the alignm
 
 static SHARED_VTABLE: Vtable = Vtable {
     clone: shared_clone,
+    will_truncate: shared_will_truncate,
     into_vec: shared_into_vec,
     drop: shared_drop,
 };
@@ -1040,6 +1062,10 @@ unsafe fn shared_clone(data: &AtomicPtr<()>, ptr: *const u8, len: usize) -> Byte
     shallow_clone_arc(shared as _, ptr, len)
 }
 
+unsafe fn shared_will_truncate(_: &mut AtomicPtr<()>, _: *const u8, _: usize) {
+    // nothing to do before truncate for Shared
+}
+
 unsafe fn shared_into_vec_impl(shared: *mut Shared, ptr: *const u8, len: usize) -> Vec<u8> {
     // Check that the ref_cnt is 1 (unique).
     //
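Putting the reprs together at the public API level (a sketch relying on the crate's behavior that cloning a `Vec`-backed `Bytes` promotes it to the refcounted shared repr):

```rust
use bytes::Bytes;

fn main() {
    let a = Bytes::from(vec![1u8, 2, 3, 4, 5]); // promotable (Vec-backed) repr
    let mut b = a.clone();                      // cloning promotes to the shared repr
    b.truncate(2);                              // shared_will_truncate: no-op prep
    assert_eq!(&b[..], &[1, 2]);
    assert_eq!(&a[..], &[1, 2, 3, 4, 5]);       // other handles are unaffected
}
```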