@@ -307,21 +307,21 @@ void BinaryFunction::computeBlockHashes(HashFunction HashFunction) const {
     BB->setHash(BlendedHashes[I].combine());
   }
 }
-
+// TODO: mediate the difference between flow function construction here in BOLT
+// and in the compiler by splitting blocks with exception throwing calls at the
+// call and adding the landing pad as the successor.
 /// Create a wrapper flow function to use with the profile inference algorithm,
 /// and initialize its jumps and metadata.
 FlowFunction
 createFlowFunction(const BinaryFunction::BasicBlockOrderType &BlockOrder) {
   FlowFunction Func;
 
   // Add a special "dummy" source so that there is always a unique entry point.
-  // Because of the extra source, for all other blocks in FlowFunction it holds
-  // that Block.Index == BB->getIndex() + 1
   FlowBlock EntryBlock;
   EntryBlock.Index = 0;
   Func.Blocks.push_back(EntryBlock);
 
-  // Create FlowBlock for every basic block in the binary function
+  // Create FlowBlock for every basic block in the binary function.
   for (const BinaryBasicBlock *BB : BlockOrder) {
     Func.Blocks.emplace_back();
     FlowBlock &Block = Func.Blocks.back();
@@ -331,7 +331,12 @@ createFlowFunction(const BinaryFunction::BasicBlockOrderType &BlockOrder) {
            "incorrectly assigned basic block index");
   }
 
-  // Create FlowJump for each jump between basic blocks in the binary function
+  // Add a special "dummy" sink block so there is always a unique sink.
+  FlowBlock SinkBlock;
+  SinkBlock.Index = Func.Blocks.size();
+  Func.Blocks.push_back(SinkBlock);
+
+  // Create FlowJump for each jump between basic blocks in the binary function.
   std::vector<uint64_t> InDegree(Func.Blocks.size(), 0);
   for (const BinaryBasicBlock *SrcBB : BlockOrder) {
     std::unordered_set<const BinaryBasicBlock *> UniqueSuccs;
@@ -348,6 +353,16 @@ createFlowFunction(const BinaryFunction::BasicBlockOrderType &BlockOrder) {
       InDegree[Jump.Target]++;
       UniqueSuccs.insert(DstBB);
     }
+    // TODO: set jump from exit block to landing pad to Unlikely.
+    // If the block is an exit, add a dummy edge from it to the sink block.
+    if (UniqueSuccs.empty()) {
+      Func.Jumps.emplace_back();
+      FlowJump &Jump = Func.Jumps.back();
+      Jump.Source = SrcBB->getIndex() + 1;
+      Jump.Target = Func.Blocks.size() - 1;
+      InDegree[Jump.Target]++;
+    }
+
     // Collect jumps to landing pads
     for (const BinaryBasicBlock *DstBB : SrcBB->landing_pads()) {
       // Ignoring parallel edges
@@ -364,9 +379,9 @@ createFlowFunction(const BinaryFunction::BasicBlockOrderType &BlockOrder) {
   }
 
   // Add dummy edges to the extra sources. If there are multiple entry blocks,
-  // add an unlikely edge from 0 to the subsequent ones
+  // add an unlikely edge from 0 to the subsequent ones. Skips the sink block.
   assert(InDegree[0] == 0 && "dummy entry blocks shouldn't have predecessors");
-  for (uint64_t I = 1; I < Func.Blocks.size(); I++) {
+  for (uint64_t I = 1; I < Func.Blocks.size() - 1; I++) {
     const BinaryBasicBlock *BB = BlockOrder[I - 1];
     if (BB->isEntryPoint() || InDegree[I] == 0) {
       Func.Jumps.emplace_back();
@@ -400,7 +415,7 @@ createFlowFunction(const BinaryFunction::BasicBlockOrderType &BlockOrder) {
 size_t matchWeightsByHashes(
     BinaryContext &BC, const BinaryFunction::BasicBlockOrderType &BlockOrder,
     const yaml::bolt::BinaryFunctionProfile &YamlBF, FlowFunction &Func) {
-  assert(Func.Blocks.size() == BlockOrder.size() + 1);
+  assert(Func.Blocks.size() == BlockOrder.size() + 2);
 
   std::vector<FlowBlock *> Blocks;
   std::vector<BlendedBlockHash> BlendedHashes;
@@ -592,9 +607,9 @@ bool canApplyInference(const FlowFunction &Func,
       opts::StaleMatchingMinMatchedBlock * YamlBF.Blocks.size())
     return false;
 
-  bool HasExitBlocks = llvm::any_of(
-      Func.Blocks, [&](const FlowBlock &Block) { return Block.isExit(); });
-  if (!HasExitBlocks)
+  // Returns false if the artificial sink block has no predecessors meaning
+  // there are no exit blocks.
+  if (Func.Blocks[Func.Blocks.size() - 1].isEntry())
     return false;
 
   return true;
@@ -631,7 +646,7 @@ void assignProfile(BinaryFunction &BF,
                    FlowFunction &Func) {
   BinaryContext &BC = BF.getBinaryContext();
 
-  assert(Func.Blocks.size() == BlockOrder.size() + 1);
+  assert(Func.Blocks.size() == BlockOrder.size() + 2);
   for (uint64_t I = 0; I < BlockOrder.size(); I++) {
     FlowBlock &Block = Func.Blocks[I + 1];
     BinaryBasicBlock *BB = BlockOrder[I];
@@ -653,6 +668,9 @@ void assignProfile(BinaryFunction &BF,
       if (Jump->Flow == 0)
        continue;
 
+      // Skips the artificial sink block.
+      if (Jump->Target == Func.Blocks.size() - 1)
+        continue;
       BinaryBasicBlock &SuccBB = *BlockOrder[Jump->Target - 1];
       // Check if the edge corresponds to a regular jump or a landing pad
       if (BB->getSuccessor(SuccBB.getLabel())) {
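
For context, the sketch below illustrates the invariant this patch establishes in the flow function: one artificial source at index 0, one artificial sink at the last index, and a dummy jump from every exit block to the sink, so that "the function has exit blocks" reduces to "the sink has a predecessor". It is a minimal standalone illustration; the Jump/Flow structs and the buildFlow helper are simplified stand-ins invented here, not BOLT's actual FlowJump/FlowFunction API.

#include <cstdint>
#include <cstdio>
#include <vector>

// Simplified stand-ins for BOLT's FlowJump/FlowFunction, for illustration only.
struct Jump { uint64_t Source, Target; };
struct Flow {
  uint64_t NumBlocks = 0;   // includes the artificial source (0) and sink (last)
  std::vector<Jump> Jumps;
};

// Build a toy flow graph for NumBBs basic blocks whose CFG edges are given as
// (Source, Target) pairs over the original 0-based block indices. Assumes a
// single entry at block 0.
Flow buildFlow(uint64_t NumBBs, const std::vector<Jump> &CFGEdges) {
  Flow F;
  F.NumBlocks = NumBBs + 2;   // + artificial source and sink
  const uint64_t Sink = F.NumBlocks - 1;
  std::vector<bool> HasSucc(NumBBs, false);

  // Shift every original edge by one: basic block I becomes flow block I + 1.
  for (const Jump &E : CFGEdges) {
    F.Jumps.push_back({E.Source + 1, E.Target + 1});
    HasSucc[E.Source] = true;
  }
  // Dummy edge from the artificial source to the entry block.
  F.Jumps.push_back({0, 1});
  // Dummy edge from every exit block to the artificial sink.
  for (uint64_t I = 0; I < NumBBs; ++I)
    if (!HasSucc[I])
      F.Jumps.push_back({I + 1, Sink});
  return F;
}

int main() {
  // Toy CFG: 0 -> 1 and 0 -> 2; blocks 1 and 2 are exits.
  Flow F = buildFlow(3, {{0, 1}, {0, 2}});
  for (const Jump &J : F.Jumps)
    std::printf("%llu -> %llu\n", (unsigned long long)J.Source,
                (unsigned long long)J.Target);
  // The sink (index 4) has incoming jumps from flow blocks 2 and 3, so the
  // "function has exit blocks" check reduces to "sink has a predecessor".
}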