@@ -47,20 +47,39 @@ struct StackEntry<I: Interner> {
     /// Whether this entry is a non-root cycle participant.
     ///
     /// We must not move the result of non-root cycle participants to the
-    /// global cache. See [SearchGraph::cycle_participants] for more details.
-    /// We store the highest stack depth of a head of a cycle this goal is involved
-    /// in. This necessary to soundly cache its provisional result.
+    /// global cache. We store the highest stack depth of a head of a cycle
+    /// this goal is involved in. This is necessary to soundly cache its
+    /// provisional result.
     non_root_cycle_participant: Option<StackDepth>,
 
     encountered_overflow: bool,
 
     has_been_used: HasBeenUsed,
+
+    /// We put only the root goal of a coinductive cycle into the global cache.
+    ///
+    /// If we were to use that result when later trying to prove another cycle
+    /// participant, we can end up with unstable query results.
+    ///
+    /// See tests/ui/next-solver/coinduction/incompleteness-unstable-result.rs for
+    /// an example of where this is needed.
+    ///
+    /// There can be multiple roots on the same stack, so we need to track
+    /// cycle participants per root:
+    /// ```plain
+    /// A :- B
+    /// B :- A, C
+    /// C :- D
+    /// D :- C
+    /// ```
+    cycle_participants: FxHashSet<CanonicalInput<I>>,
 
     /// Starts out as `None` and gets set when rerunning this
     /// goal in case we encounter a cycle.
     provisional_result: Option<QueryResult<I>>,
 }
 
 /// The provisional result for a goal which is not on the stack.
+#[derive(Debug)]
 struct DetachedEntry<I: Interner> {
     /// The head of the smallest non-trivial cycle involving this entry.
     ///
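The `plain` example in the new doc comment is worth spelling out: with `A :- B`, `B :- A, C`, `C :- D`, `D :- C`, both `A` and `C` are cycle roots on the stack at the same time, which is why one graph-wide participant set (removed from `SearchGraph` in the next hunk) is not enough. A minimal sketch of the per-root bookkeeping, using toy strings and `std::collections::HashSet` rather than the rustc types:

```rust
use std::collections::HashSet;

// Toy stand-in for a stack entry: each potential cycle root owns the set of
// participants of the cycles it heads, instead of one set shared by the graph.
struct Entry {
    input: &'static str,
    cycle_participants: HashSet<&'static str>,
}

fn main() {
    // A :- B, B :- A: `B` participates in the cycle rooted at `A`.
    let mut a = Entry { input: "A", cycle_participants: HashSet::new() };
    a.cycle_participants.insert("B");
    // C :- D, D :- C: `D` participates in the cycle rooted at `C`.
    let mut c = Entry { input: "C", cycle_participants: HashSet::new() };
    c.cycle_participants.insert("D");
    // Popping `C` moves only `{C, D}` data towards the global cache; the still
    // in-progress cycle rooted at `A` keeps its own participants.
    assert!(!a.cycle_participants.contains("D"));
    println!("root {} owns {:?}", c.input, c.cycle_participants);
}
```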
@@ -110,24 +129,11 @@ pub(super) struct SearchGraph<I: Interner> {
     /// An element is *deeper* in the stack if its index is *lower*.
     stack: IndexVec<StackDepth, StackEntry<I>>,
     provisional_cache: FxHashMap<CanonicalInput<I>, ProvisionalCacheEntry<I>>,
-    /// We put only the root goal of a coinductive cycle into the global cache.
-    ///
-    /// If we were to use that result when later trying to prove another cycle
-    /// participant, we can end up with unstable query results.
-    ///
-    /// See tests/ui/next-solver/coinduction/incompleteness-unstable-result.rs for
-    /// an example of where this is needed.
-    cycle_participants: FxHashSet<CanonicalInput<I>>,
 }
 
 impl<I: Interner> SearchGraph<I> {
     pub(super) fn new(mode: SolverMode) -> SearchGraph<I> {
-        Self {
-            mode,
-            stack: Default::default(),
-            provisional_cache: Default::default(),
-            cycle_participants: Default::default(),
-        }
+        Self { mode, stack: Default::default(), provisional_cache: Default::default() }
     }
 
     pub(super) fn solver_mode(&self) -> SolverMode {
@@ -149,13 +155,7 @@ impl<I: Interner> SearchGraph<I> {
     }
 
     pub(super) fn is_empty(&self) -> bool {
-        if self.stack.is_empty() {
-            debug_assert!(self.provisional_cache.is_empty());
-            debug_assert!(self.cycle_participants.is_empty());
-            true
-        } else {
-            false
-        }
+        self.stack.is_empty()
     }
 
     /// Returns the remaining depth allowed for nested goals.
@@ -205,15 +205,26 @@ impl<I: Interner> SearchGraph<I> {
     // their result does not get moved to the global cache.
     fn tag_cycle_participants(
         stack: &mut IndexVec<StackDepth, StackEntry<I>>,
-        cycle_participants: &mut FxHashSet<CanonicalInput<I>>,
         usage_kind: HasBeenUsed,
         head: StackDepth,
     ) {
         stack[head].has_been_used |= usage_kind;
         debug_assert!(!stack[head].has_been_used.is_empty());
-        for entry in &mut stack.raw[head.index() + 1..] {
+
+        // The current root of these cycles. Note that this may not be the final
+        // root in case a later goal depends on a goal higher up the stack.
+        let mut current_root = head;
+        while let Some(parent) = stack[current_root].non_root_cycle_participant {
+            current_root = parent;
+            debug_assert!(!stack[current_root].has_been_used.is_empty());
+        }
+
+        let (stack, cycle_participants) = stack.raw.split_at_mut(head.index() + 1);
+        let current_cycle_root = &mut stack[current_root.as_usize()];
+        for entry in cycle_participants {
             entry.non_root_cycle_participant = entry.non_root_cycle_participant.max(Some(head));
-            cycle_participants.insert(entry.input);
+            current_cycle_root.cycle_participants.insert(entry.input);
+            current_cycle_root.cycle_participants.extend(mem::take(&mut entry.cycle_participants));
         }
     }
 
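A minimal sketch of the root walk added above, using plain `usize` indices and a slice instead of `StackDepth` and `IndexVec` (illustrative names, not the rustc API): chasing `non_root_cycle_participant` links from `head` finds the entry that currently owns the merged participant set.

```rust
// Each stack entry optionally records the head of a cycle it participates in.
// The current root is found by following those links until they run out.
fn current_root(non_root_cycle_participant: &[Option<usize>], head: usize) -> usize {
    let mut current = head;
    while let Some(parent) = non_root_cycle_participant[current] {
        current = parent;
    }
    current
}

fn main() {
    // Stack for `A :- B; B :- A, C; C :- D; D :- C`:
    // index 0 = A (root), 1 = B (head A), 2 = C (root), 3 = D (head C).
    let links = [None, Some(0), None, Some(2)];
    assert_eq!(current_root(&links, 1), 0); // B's participants merge into A
    assert_eq!(current_root(&links, 3), 2); // D's participants merge into C
}
```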
@@ -256,6 +267,7 @@ impl<'tcx> SearchGraph<TyCtxt<'tcx>> {
             &mut ProofTreeBuilder<TyCtxt<'tcx>>,
         ) -> QueryResult<TyCtxt<'tcx>>,
     ) -> QueryResult<TyCtxt<'tcx>> {
+        self.check_invariants();
         // Check for overflow.
         let Some(available_depth) = Self::allowed_depth_for_nested(tcx, &self.stack) else {
             if let Some(last) = self.stack.raw.last_mut() {
@@ -292,12 +304,7 @@ impl<'tcx> SearchGraph<TyCtxt<'tcx>> {
             // already set correctly while computing the cache entry.
             inspect
                 .goal_evaluation_kind(inspect::WipCanonicalGoalEvaluationKind::ProvisionalCacheHit);
-            Self::tag_cycle_participants(
-                &mut self.stack,
-                &mut self.cycle_participants,
-                HasBeenUsed::empty(),
-                entry.head,
-            );
+            Self::tag_cycle_participants(&mut self.stack, HasBeenUsed::empty(), entry.head);
             return entry.result;
         } else if let Some(stack_depth) = cache_entry.stack_depth {
             debug!("encountered cycle with depth {stack_depth:?}");
@@ -314,12 +321,7 @@ impl<'tcx> SearchGraph<TyCtxt<'tcx>> {
             } else {
                 HasBeenUsed::INDUCTIVE_CYCLE
             };
-            Self::tag_cycle_participants(
-                &mut self.stack,
-                &mut self.cycle_participants,
-                usage_kind,
-                stack_depth,
-            );
+            Self::tag_cycle_participants(&mut self.stack, usage_kind, stack_depth);
 
             // Return the provisional result or, if we're in the first iteration,
             // start with no constraints.
@@ -340,6 +342,7 @@ impl<'tcx> SearchGraph<TyCtxt<'tcx>> {
             non_root_cycle_participant: None,
             encountered_overflow: false,
             has_been_used: HasBeenUsed::empty(),
+            cycle_participants: Default::default(),
             provisional_result: None,
         };
         assert_eq!(self.stack.push(entry), depth);
@@ -386,27 +389,28 @@ impl<'tcx> SearchGraph<TyCtxt<'tcx>> {
         } else {
             self.provisional_cache.remove(&input);
             let reached_depth = final_entry.reached_depth.as_usize() - self.stack.len();
-            let cycle_participants = mem::take(&mut self.cycle_participants);
             // When encountering a cycle, both inductive and coinductive, we only
             // move the root into the global cache. We also store all other cycle
             // participants involved.
             //
             // We must not use the global cache entry of a root goal if a cycle
             // participant is on the stack. This is necessary to prevent unstable
-            // results. See the comment of `SearchGraph::cycle_participants` for
+            // results. See the comment of `StackEntry::cycle_participants` for
             // more details.
             self.global_cache(tcx).insert(
                 tcx,
                 input,
                 proof_tree,
                 reached_depth,
                 final_entry.encountered_overflow,
-                cycle_participants,
+                final_entry.cycle_participants,
                 dep_node,
                 result,
             )
         }
 
+        self.check_invariants();
+
         result
     }
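As a hedged sketch of the soundness condition mentioned in the comment above (toy generics, not the real global cache API): a cached root result recorded together with cycle participants may only be reused when none of those participants is currently on the stack, which is why `get` receives the stack's inputs in `lookup_global_cache` below.

```rust
use std::collections::HashSet;
use std::hash::Hash;

// `Goal` stands in for the solver's canonical goals.
fn can_use_cached<Goal: Hash + Eq>(
    cycle_participants: &HashSet<Goal>,
    mut stack: impl Iterator<Item = Goal>,
) -> bool {
    // Reuse is sound only if no recorded participant is currently on the stack.
    stack.all(|goal| !cycle_participants.contains(&goal))
}

fn main() {
    let participants: HashSet<_> = ["B", "D"].into_iter().collect();
    assert!(!can_use_cached(&participants, ["A", "B"].into_iter())); // B is on the stack
    assert!(can_use_cached(&participants, ["A", "C"].into_iter())); // no overlap, fine
}
```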
@@ -416,10 +420,10 @@ impl<'tcx> SearchGraph<TyCtxt<'tcx>> {
     fn lookup_global_cache(
         &mut self,
         tcx: TyCtxt<'tcx>,
-        input: CanonicalInput<'tcx>,
+        input: CanonicalInput<TyCtxt<'tcx>>,
         available_depth: Limit,
         inspect: &mut ProofTreeBuilder<TyCtxt<'tcx>>,
-    ) -> Option<QueryResult<'tcx>> {
+    ) -> Option<QueryResult<TyCtxt<'tcx>>> {
         let CacheData { result, proof_tree, additional_depth, encountered_overflow } = self
             .global_cache(tcx)
             .get(tcx, input, self.stack.iter().map(|e| e.input), available_depth)?;
@@ -450,12 +454,12 @@ impl<'tcx> SearchGraph<TyCtxt<'tcx>> {
     }
 }
 
-enum StepResult<'tcx> {
-    Done(StackEntry<'tcx>, QueryResult<'tcx>),
+enum StepResult<I: Interner> {
+    Done(StackEntry<I>, QueryResult<I>),
     HasChanged,
 }
 
-impl<'tcx> SearchGraph<'tcx> {
+impl<'tcx> SearchGraph<TyCtxt<'tcx>> {
     /// When we encounter a coinductive cycle, we have to fetch the
     /// result of that cycle while we are still computing it. Because
     /// of this we continuously recompute the cycle until the result
@@ -464,12 +468,12 @@ impl<'tcx> SearchGraph<'tcx> {
     fn fixpoint_step_in_task<F>(
         &mut self,
         tcx: TyCtxt<'tcx>,
-        input: CanonicalInput<'tcx>,
+        input: CanonicalInput<TyCtxt<'tcx>>,
         inspect: &mut ProofTreeBuilder<TyCtxt<'tcx>>,
         prove_goal: &mut F,
-    ) -> StepResult<'tcx>
+    ) -> StepResult<TyCtxt<'tcx>>
     where
-        F: FnMut(&mut Self, &mut ProofTreeBuilder<TyCtxt<'tcx>>) -> QueryResult<'tcx>,
+        F: FnMut(&mut Self, &mut ProofTreeBuilder<TyCtxt<'tcx>>) -> QueryResult<TyCtxt<'tcx>>,
     {
         let result = prove_goal(self, inspect);
         let stack_entry = self.pop_stack();
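The doc comment above describes a fixpoint loop of which `fixpoint_step_in_task` is a single step. A toy model of that loop, with a plain value in place of the solver's provisional `QueryResult` (names are illustrative):

```rust
// Reprove the cycle against the previous provisional result until the result
// stops changing; only a stabilized result is safe to cache.
fn fixpoint<R: Copy + Eq>(initial: R, mut prove: impl FnMut(R) -> R) -> R {
    let mut provisional = initial;
    loop {
        let next = prove(provisional);
        if next == provisional {
            return next; // stabilized, analogous to `StepResult::Done`
        }
        provisional = next; // changed, rerun like `StepResult::HasChanged`
    }
}

fn main() {
    // 0 -> 3 -> 4 -> 5 -> 5: converges after four steps.
    assert_eq!(fixpoint(0u32, |x| x / 2 + 3), 5);
}
```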
@@ -530,3 +534,77 @@ impl<'tcx> SearchGraph<'tcx> {
         Ok(super::response_no_constraints_raw(tcx, goal.max_universe, goal.variables, certainty))
     }
 }
+
+impl<I: Interner> SearchGraph<I> {
+    #[allow(rustc::potential_query_instability)]
+    fn check_invariants(&self) {
+        if !cfg!(debug_assertions) {
+            return;
+        }
+
+        let SearchGraph { mode: _, stack, provisional_cache } = self;
+        if stack.is_empty() {
+            assert!(provisional_cache.is_empty());
+        }
+
+        for (depth, entry) in stack.iter_enumerated() {
+            let StackEntry {
+                input,
+                available_depth: _,
+                reached_depth: _,
+                non_root_cycle_participant,
+                encountered_overflow: _,
+                has_been_used,
+                ref cycle_participants,
+                provisional_result,
+            } = *entry;
+            let cache_entry = provisional_cache.get(&entry.input).unwrap();
+            assert_eq!(cache_entry.stack_depth, Some(depth));
+            if let Some(head) = non_root_cycle_participant {
+                assert!(head < depth);
+                assert!(cycle_participants.is_empty());
+                assert_ne!(stack[head].has_been_used, HasBeenUsed::empty());
+
+                let mut current_root = head;
+                while let Some(parent) = stack[current_root].non_root_cycle_participant {
+                    current_root = parent;
+                }
+                assert!(stack[current_root].cycle_participants.contains(&input));
+            }
+
+            if !cycle_participants.is_empty() {
+                assert!(provisional_result.is_some() || !has_been_used.is_empty());
+                for entry in stack.iter().take(depth.as_usize()) {
+                    assert_eq!(cycle_participants.get(&entry.input), None);
+                }
+            }
+        }
+
+        for (&input, entry) in &self.provisional_cache {
+            let ProvisionalCacheEntry { stack_depth, with_coinductive_stack, with_inductive_stack } =
+                entry;
+            assert!(
+                stack_depth.is_some()
+                    || with_coinductive_stack.is_some()
+                    || with_inductive_stack.is_some()
+            );
+
+            if let &Some(stack_depth) = stack_depth {
+                assert_eq!(stack[stack_depth].input, input);
+            }
+
+            let check_detached = |detached_entry: &DetachedEntry<I>| {
+                let DetachedEntry { head, result: _ } = *detached_entry;
+                assert_ne!(stack[head].has_been_used, HasBeenUsed::empty());
+            };
+
+            if let Some(with_coinductive_stack) = with_coinductive_stack {
+                check_detached(with_coinductive_stack);
+            }
+
+            if let Some(with_inductive_stack) = with_inductive_stack {
+                check_detached(with_inductive_stack);
+            }
+        }
+    }
+}
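A note on the gating at the top of `check_invariants`: `cfg!(debug_assertions)` expands to a constant boolean, so the body still type-checks in release builds but becomes trivially dead code there. The same pattern in isolation, as a hypothetical example that is not part of the patch:

```rust
// In release builds `cfg!(debug_assertions)` is `false`, so the early return
// lets the optimizer discard the assertion body entirely.
fn check_sorted(items: &[u32]) {
    if !cfg!(debug_assertions) {
        return;
    }
    assert!(items.windows(2).all(|w| w[0] <= w[1]), "expected sorted input");
}

fn main() {
    check_sorted(&[1, 2, 3]);
}
```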