@@ -105,8 +105,6 @@ pub struct Scopes<'tcx> {

     /// Drops that need to be done on paths to the `GeneratorDrop` terminator.
     generator_drops: DropTree,
-    // TODO: implement caching
-    // cached_unwind_drop: DropIdx,
 }

 #[derive(Debug)]
@@ -127,6 +125,14 @@ struct Scope {
     drops: Vec<DropData>,

     moved_locals: Vec<Local>,
+
+    /// The drop index that will drop everything in and below this scope on an
+    /// unwind path.
+    cached_unwind_block: Option<DropIdx>,
+
+    /// The drop index that will drop everything in and below this scope on a
+    /// generator drop path.
+    cached_generator_drop_block: Option<DropIdx>,
 }

 #[derive(Clone, Copy, Debug)]
@@ -213,6 +219,11 @@ impl Scope {
             DropKind::Storage => false,
         })
     }
+
+    fn invalidate_cache(&mut self) {
+        self.cached_unwind_block = None;
+        self.cached_generator_drop_block = None;
+    }
 }

 /// A trait that determined how [DropTree::lower_to_mir] creates its blocks and
@@ -229,11 +240,7 @@ impl DropTree {
             DropData { source_info: fake_source_info, local: Local::MAX, kind: DropKind::Storage };
         let drop_idx = DropIdx::MAX;
         let drops = IndexVec::from_elem_n((fake_data, drop_idx), 1);
-        Self {
-            drops,
-            entry_points: Vec::new(),
-            previous_drops: FxHashMap::default(),
-        }
+        Self { drops, entry_points: Vec::new(), previous_drops: FxHashMap::default() }
     }

     fn add_drop(&mut self, drop: DropData, next: DropIdx) -> DropIdx {
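(Editorial sketch, not part of the patch.) Re-adding a drop chain that already exists is cheap because `add_drop` memoizes `(drop, next)` pairs in `previous_drops` and returns the existing node instead of growing the tree. A minimal, self-contained model of that idea, using simplified stand-in types rather than the real `DropData`, `DropIdx`, `IndexVec`, and `FxHashMap`:

use std::collections::HashMap;

// Simplified stand-ins for the real rustc types.
#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
struct DropIdx(usize);

#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
struct DropData(u32); // stands in for (local, kind, source_info)

struct DropTree {
    // Each node is (drop data, parent); node 0 is a fake root.
    drops: Vec<(DropData, DropIdx)>,
    // Memoization: an existing (drop, parent) pair maps to its node.
    previous_drops: HashMap<(DropData, DropIdx), DropIdx>,
}

impl DropTree {
    fn new() -> Self {
        let fake = (DropData(u32::MAX), DropIdx(usize::MAX));
        DropTree { drops: vec![fake], previous_drops: HashMap::new() }
    }

    fn add_drop(&mut self, drop: DropData, next: DropIdx) -> DropIdx {
        let drops = &mut self.drops;
        *self.previous_drops.entry((drop, next)).or_insert_with(|| {
            drops.push((drop, next));
            DropIdx(drops.len() - 1)
        })
    }
}

fn main() {
    let mut tree = DropTree::new();
    let root = DropIdx(0);
    let a = tree.add_drop(DropData(1), root);
    let b = tree.add_drop(DropData(2), a);
    // Re-adding the same chain reuses the existing nodes.
    let a2 = tree.add_drop(DropData(1), root);
    let b2 = tree.add_drop(DropData(2), a2);
    assert_eq!((a, b), (a2, b2));
    println!("nodes: {}", tree.drops.len()); // 3: root + two drops
}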
@@ -383,6 +390,8 @@ impl<'tcx> Scopes<'tcx> {
             region_scope_span: region_scope.1.span,
             drops: vec![],
             moved_locals: vec![],
+            cached_unwind_block: None,
+            cached_generator_drop_block: None,
         });
     }

@@ -399,10 +408,6 @@ impl<'tcx> Scopes<'tcx> {
             .unwrap_or_else(|| span_bug!(span, "region_scope {:?} does not enclose", region_scope))
     }

-    fn iter_mut(&mut self) -> impl DoubleEndedIterator<Item = &mut Scope> + '_ {
-        self.scopes.iter_mut().rev()
-    }
-
     /// Returns the topmost active scope, which is known to be alive until
     /// the next scope expression.
     fn topmost(&self) -> region::Scope {
@@ -609,10 +614,11 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
         } else {
             self.scopes.breakable_scopes[break_index].continue_drops.as_mut().unwrap()
         };
-
-        let mut drop_idx = DropIdx::from_u32(destination.is_none() as u32);
-        for drop in scope_drops {
-            drop_idx = drops.add_drop(*drop, drop_idx);
+        let mut drop_idx = ROOT_NODE;
+        for scope in &self.scopes.scopes[scope_index + 1..] {
+            for drop in &scope.drops {
+                drop_idx = drops.add_drop(*drop, drop_idx);
+            }
         }
         drops.add_entry(block, drop_idx);
         // `build_drop_tree` doesn't have access to our source_info, so we
@@ -668,19 +674,6 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
         ))
     }

-    /// Sets up a path that performs all required cleanup for dropping a generator.
-    ///
-    /// This path terminates in GeneratorDrop. Returns the start of the path.
-    /// None indicates there’s no cleanup to do at this point.
-    crate fn generator_drop_cleanup(&mut self, yield_block: BasicBlock) {
-        let drops = self.scopes.scopes.iter().flat_map(|scope| &scope.drops);
-        let mut next_drop = ROOT_NODE;
-        for drop in drops {
-            next_drop = self.scopes.generator_drops.add_drop(*drop, next_drop);
-        }
-        self.scopes.generator_drops.add_entry(yield_block, next_drop);
-    }
-
     /// Creates a new source scope, nested in the current one.
     crate fn new_source_scope(
         &mut self,
@@ -777,8 +770,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
         local: Local,
         drop_kind: DropKind,
     ) {
-        // TODO: add back in caching.
-        let _needs_drop = match drop_kind {
+        let needs_drop = match drop_kind {
             DropKind::Value => {
                 if !self.hir.needs_drop(self.local_decls[local].ty) {
                     return;
@@ -798,23 +790,29 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
             }
         };

-        let scope = self
-            .scopes
-            .iter_mut()
-            .find(|scope| scope.region_scope == region_scope)
-            .unwrap_or_else(|| {
-                span_bug!(span, "region scope {:?} not in scope to drop {:?}", region_scope, local);
-            });
-
-        let region_scope_span = region_scope.span(self.hir.tcx(), &self.hir.region_scope_tree);
-        // Attribute scope exit drops to scope's closing brace.
-        let scope_end = self.hir.tcx().sess.source_map().end_point(region_scope_span);
-
-        scope.drops.push(DropData {
-            source_info: SourceInfo { span: scope_end, scope: scope.source_scope },
-            local,
-            kind: drop_kind,
-        });
+        let invalidate_caches = needs_drop || self.is_generator;
+        for scope in self.scopes.scopes.iter_mut().rev() {
+            if invalidate_caches {
+                scope.invalidate_cache();
+            }
+
+            if scope.region_scope == region_scope {
+                let region_scope_span =
+                    region_scope.span(self.hir.tcx(), &self.hir.region_scope_tree);
+                // Attribute scope exit drops to scope's closing brace.
+                let scope_end = self.hir.tcx().sess.source_map().end_point(region_scope_span);
+
+                scope.drops.push(DropData {
+                    source_info: SourceInfo { span: scope_end, scope: scope.source_scope },
+                    local,
+                    kind: drop_kind,
+                });
+
+                return;
+            }
+        }
+
+        span_bug!(span, "region scope {:?} not in scope to drop {:?}", region_scope, local);
     }

     /// Indicates that the "local operand" stored in `local` is
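(Editorial sketch, not part of the patch.) `schedule_drop` now walks the scope stack from the innermost scope outward, clearing the cached unwind and generator-drop indices of each scope it visits, including the target, before pushing the drop and returning; scopes enclosing the target keep their caches, and the walk skips invalidation for storage-only drops outside generators (`needs_drop || self.is_generator`). A simplified standalone model of that invalidation walk, with a hypothetical `Scope` that uses plain integers for scope ids and drops, keeps only one cache field, and omits the `needs_drop` gate:

// Simplified model: each scope may cache the drop-tree node that already
// covers its own drops plus those of all enclosing scopes.
// (The real Scope also caches a generator-drop node.)
#[derive(Debug)]
struct Scope {
    id: u32,                            // stands in for region_scope
    drops: Vec<u32>,                    // stands in for Vec<DropData>
    cached_unwind_block: Option<usize>, // stands in for Option<DropIdx>
}

impl Scope {
    fn invalidate_cache(&mut self) {
        self.cached_unwind_block = None;
    }
}

/// Push a drop onto the scope `target`, clearing the caches of that scope
/// and of every scope nested inside it (their cached chains would otherwise
/// miss the new drop).
fn schedule_drop(scopes: &mut [Scope], target: u32, drop: u32) {
    for scope in scopes.iter_mut().rev() {
        scope.invalidate_cache();
        if scope.id == target {
            scope.drops.push(drop);
            return;
        }
    }
    panic!("scope {target} not in scope to drop {drop}");
}

fn main() {
    // Index 0 is the outermost scope, index 2 the innermost.
    let mut scopes = vec![
        Scope { id: 0, drops: vec![], cached_unwind_block: Some(1) },
        Scope { id: 1, drops: vec![], cached_unwind_block: Some(2) },
        Scope { id: 2, drops: vec![], cached_unwind_block: Some(3) },
    ];
    schedule_drop(&mut scopes, 1, 42);
    // The outermost scope's cache survives; the target and inner ones are cleared.
    assert_eq!(scopes[0].cached_unwind_block, Some(1));
    assert!(scopes[1].cached_unwind_block.is_none());
    assert!(scopes[2].cached_unwind_block.is_none());
    println!("{scopes:?}");
}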
@@ -861,9 +859,10 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
             }

             Some(local_scope) => self
+                .scopes
                 .scopes
                 .iter_mut()
-                .find(|scope| scope.region_scope == local_scope)
+                .rfind(|scope| scope.region_scope == local_scope)
                 .unwrap_or_else(|| bug!("scope {:?} not found in scope list!", local_scope)),
         };

@@ -913,6 +912,9 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
         // Manually drop the condition on both branches.
         let top_scope = self.scopes.scopes.last_mut().unwrap();
         let top_drop_data = top_scope.drops.pop().unwrap();
+        if self.is_generator {
+            top_scope.invalidate_cache();
+        }

         match top_drop_data.kind {
             DropKind::Value { .. } => {
@@ -943,17 +945,25 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {

     fn diverge_cleanup(&mut self) -> DropIdx {
         let is_generator = self.is_generator;
-        let drops = self
+        let (uncached_scope, mut cached_drop) = self
             .scopes
             .scopes
             .iter()
-            .flat_map(|scope| &scope.drops)
-            .filter(|drop| is_generator || drop.kind == DropKind::Value);
-        let mut next_drop = ROOT_NODE;
-        for drop in drops {
-            next_drop = self.scopes.unwind_drops.add_drop(*drop, next_drop);
+            .enumerate()
+            .rev()
+            .find_map(|(scope_idx, scope)| {
+                scope.cached_unwind_block.map(|cached_block| (scope_idx + 1, cached_block))
+            })
+            .unwrap_or((0, ROOT_NODE));
+        for scope in &mut self.scopes.scopes[uncached_scope..] {
+            for drop in &scope.drops {
+                if is_generator || drop.kind == DropKind::Value {
+                    cached_drop = self.scopes.unwind_drops.add_drop(*drop, cached_drop);
+                }
+            }
+            scope.cached_unwind_block = Some(cached_drop);
         }
-        next_drop
+        cached_drop
     }

     /// Prepares to create a path that performs all required cleanup for
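(Editorial sketch, not part of the patch.) `diverge_cleanup` now resumes from the innermost scope that still has a valid `cached_unwind_block`, feeds only the drops of the scopes nested inside it into the unwind tree, and records the resulting node on each of those scopes so the next call can resume from a deeper point. A simplified standalone model of that look-up-then-extend pattern, using plain `usize`/`u32` stand-ins and a `Vec` chain in place of the deduplicating `DropTree`:

const ROOT_NODE: usize = 0;

struct Scope {
    drops: Vec<u32>,                    // stands in for Vec<DropData>
    cached_unwind_block: Option<usize>, // node covering this scope and all enclosing ones
}

/// Append a drop to the chain and return the new node index.
/// (The real DropTree also deduplicates; a plain Vec keeps the sketch short.)
fn add_drop(tree: &mut Vec<(u32, usize)>, drop: u32, next: usize) -> usize {
    tree.push((drop, next));
    tree.len() - 1
}

fn diverge_cleanup(scopes: &mut [Scope], tree: &mut Vec<(u32, usize)>) -> usize {
    // Innermost scope whose cache is still valid; start from it, or from the root.
    let (uncached_scope, mut cached_drop) = scopes
        .iter()
        .enumerate()
        .rev()
        .find_map(|(idx, scope)| scope.cached_unwind_block.map(|cached| (idx + 1, cached)))
        .unwrap_or((0, ROOT_NODE));
    // Only the scopes without a cache entry need their drops added.
    for scope in &mut scopes[uncached_scope..] {
        for &drop in &scope.drops {
            cached_drop = add_drop(tree, drop, cached_drop);
        }
        scope.cached_unwind_block = Some(cached_drop);
    }
    cached_drop
}

fn main() {
    let mut tree = vec![(u32::MAX, usize::MAX)]; // fake root node
    let mut scopes = vec![
        Scope { drops: vec![1], cached_unwind_block: None },
        Scope { drops: vec![2, 3], cached_unwind_block: None },
    ];
    let first = diverge_cleanup(&mut scopes, &mut tree);
    let nodes_after_first = tree.len();
    // A second call finds every scope cached and does no work on the tree.
    let second = diverge_cleanup(&mut scopes, &mut tree);
    assert_eq!(first, second);
    assert_eq!(tree.len(), nodes_after_first);
    println!("unwind chain head: {first}, tree nodes: {}", tree.len());
}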
@@ -966,6 +976,30 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
         self.scopes.unwind_drops.add_entry(start, next_drop);
     }

+    /// Sets up a path that performs all required cleanup for dropping a generator.
+    ///
+    /// This path terminates in GeneratorDrop. Returns the start of the path.
+    /// None indicates there’s no cleanup to do at this point.
+    crate fn generator_drop_cleanup(&mut self, yield_block: BasicBlock) {
+        let (uncached_scope, mut cached_drop) = self
+            .scopes
+            .scopes
+            .iter()
+            .enumerate()
+            .rev()
+            .find_map(|(scope_idx, scope)| {
+                scope.cached_generator_drop_block.map(|cached_block| (scope_idx + 1, cached_block))
+            })
+            .unwrap_or((0, ROOT_NODE));
+        for scope in &mut self.scopes.scopes[uncached_scope..] {
+            for drop in &scope.drops {
+                cached_drop = self.scopes.generator_drops.add_drop(*drop, cached_drop);
+            }
+            scope.cached_generator_drop_block = Some(cached_drop);
+        }
+        self.scopes.generator_drops.add_entry(yield_block, cached_drop);
+    }
+
     /// Utility function for *non*-scope code to build their own drops
     crate fn build_drop_and_replace(
         &mut self,
@@ -1022,6 +1056,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
         assert_eq!(top_scope.region_scope, region_scope);

         top_scope.drops.clear();
+        top_scope.invalidate_cache();
     }
 }
