#include "llvm/Analysis/ObjCARCAnalysisUtils.h"
#include "llvm/Analysis/ObjCARCInstKind.h"
#include "llvm/Analysis/ObjCARCUtil.h"
+ #include "llvm/Analysis/OptimizationRemarkEmitter.h"
#include "llvm/IR/BasicBlock.h"
#include "llvm/IR/CFG.h"
#include "llvm/IR/Constant.h"
@@ -132,11 +133,8 @@ static const Value *FindSingleUseIdentifiedObject(const Value *Arg) {
//
// The second retain and autorelease can be deleted.
//
- // TODO: It should be possible to delete
- // objc_autoreleasePoolPush and objc_autoreleasePoolPop
- // pairs if nothing is actually autoreleased between them. Also, autorelease
- // calls followed by objc_autoreleasePoolPop calls (perhaps in ObjC++ code
- // after inlining) can be turned into plain release calls.
+ // TODO: Autorelease calls followed by objc_autoreleasePoolPop calls (perhaps in
+ // ObjC++ code after inlining) can be turned into plain release calls.
//
// TODO: Critical-edge splitting. If the optimial insertion point is
// a critical edge, the current algorithm has to fail, because it doesn't
@@ -566,6 +564,8 @@ class ObjCARCOpt {
void OptimizeReturns(Function &F);

+ void OptimizeAutoreleasePools(Function &F);
+
template <typename PredicateT>
static void cloneOpBundlesIf(CallBase *CI,
SmallVectorImpl<OperandBundleDef> &OpBundles,
@@ -1761,8 +1761,7 @@ void ObjCARCOpt::MoveCalls(Value *Arg, RRInfo &RetainsToMove,
RRInfo &ReleasesToMove,
BlotMapVector<Value *, RRInfo> &Retains,
DenseMap<Value *, RRInfo> &Releases,
- SmallVectorImpl<Instruction *> &DeadInsts,
- Module *M) {
+ SmallVectorImpl<Instruction *> &DeadInsts, Module *M) {
LLVM_DEBUG(dbgs() << "== ObjCARCOpt::MoveCalls ==\n");

// Insert the new retain and release calls.
@@ -2473,6 +2472,11 @@ bool ObjCARCOpt::run(Function &F, AAResults &AA) {
(1 << unsigned(ARCInstKind::AutoreleaseRV))))
OptimizeReturns(F);

+ // Optimizations for autorelease pools.
+ if (UsedInThisFunction & ((1 << unsigned(ARCInstKind::AutoreleasepoolPush)) |
+ (1 << unsigned(ARCInstKind::AutoreleasepoolPop))))
+ OptimizeAutoreleasePools(F);
+
// Gather statistics after optimization.
#ifndef NDEBUG
if (AreStatisticsEnabled()) {
@@ -2485,6 +2489,200 @@ bool ObjCARCOpt::run(Function &F, AAResults &AA) {
return Changed;
}

+ /// Helper function to recursively check if a value eventually leads to the target instruction
+ /// through pointer casts and uses, up to a specified depth.
+ static bool checkLeadsToTarget(Value *Val, User *Target, unsigned MaxDepth,
+ SmallPtrSet<Value *, 8> &Visited) {
+ if (MaxDepth == 0)
+ return false;
+
+ // Avoid infinite recursion by tracking visited values
+ if (!Visited.insert(Val).second)
+ return false;
+
+ for (User *U : Val->users()) {
+ if (U == Target) {
+ return true;
+ }
+
+ // For pointer casts, recursively check their users
+ if (isa<CastInst>(U) || isa<BitCastInst>(U)) {
+ Value *CastResult = dyn_cast<Value>(U);
+ if (CastResult && checkLeadsToTarget(CastResult, Target, MaxDepth - 1, Visited)) {
+ return true;
+ }
+ }
+ }
+
+ return false;
+ }
+
+ /// Optimize autorelease pools by eliminating empty push/pop pairs.
+ void ObjCARCOpt::OptimizeAutoreleasePools(Function &F) {
+ LLVM_DEBUG(dbgs() << "\n== ObjCARCOpt::OptimizeAutoreleasePools ==\n");
+
+ OptimizationRemarkEmitter ORE(&F);
+
+ // Track empty autorelease pool push/pop pairs
+ SmallVector<std::pair<CallInst *, CallInst *>, 4> EmptyPoolPairs;
+
+ // Process each basic block independently.
+ // TODO: Can we optimize inter-block autorelease pool pairs?
+ // This would involve tracking autorelease pool state across blocks.
+ for (BasicBlock &BB : F) {
+ // Use a stack to track nested autorelease pools
+ SmallVector<std::pair<CallInst *, bool>, 4> PoolStack; // {push_inst, has_autorelease_in_scope}
+
+ for (Instruction &Inst : BB) {
+ ARCInstKind Class = GetBasicARCInstKind(&Inst);
+
+ switch (Class) {
+ case ARCInstKind::AutoreleasepoolPush: {
+ // Start tracking a new autorelease pool scope
+ CallInst *Push = cast<CallInst>(&Inst);
+ PoolStack.push_back({Push, false}); // {push_inst, has_autorelease_in_scope}
+ LLVM_DEBUG(dbgs() << "Found autorelease pool push: " << *Push
+ << "\n");
+ break;
+ }
+
+ case ARCInstKind::AutoreleasepoolPop: {
+ CallInst *Pop = cast<CallInst>(&Inst);
+
+ if (!PoolStack.empty()) {
+ auto &TopPool = PoolStack.back();
+ CallInst *PendingPush = TopPool.first;
+ bool HasAutoreleaseInScope = TopPool.second;
+
+ // Check if this pop matches the pending push by comparing the token
+ Value *PopArg = Pop->getArgOperand(0);
+ bool IsMatchingPop = (PopArg == PendingPush);
+
+ // Also handle pointer casts by stripping them
+ if (!IsMatchingPop) {
+ Value *StrippedPopArg = PopArg->stripPointerCasts();
+ IsMatchingPop = (StrippedPopArg == PendingPush);
+ }
+
+ // Before adding to EmptyPoolPairs, verify ALL uses of the push
+ // eventually lead to this specific pop instruction
+ bool SafeToOptimize = true;
+ if (IsMatchingPop) {
+ for (User *U : PendingPush->users()) {
+ // Check if this use eventually leads to our pop instruction
+ bool LeadsToExpectedPop = false;
+
+ if (U == Pop) {
+ LeadsToExpectedPop = true;
+ } else {
+ // For pointer casts, check if they eventually lead to the pop
+ // (up to 3 levels deep)
+ Value *CastResult = dyn_cast<Value>(U);
+ if (CastResult && isa<CastInst>(U)) {
+ SmallPtrSet<Value *, 8> Visited;
+ if (checkLeadsToTarget(CastResult, Pop, 3, Visited)) {
+ LeadsToExpectedPop = true;
+ }
+ }
+ }
+
+ if (!LeadsToExpectedPop) {
+ SafeToOptimize = false;
+ LLVM_DEBUG(dbgs() << "Unsafe to optimize: push has unexpected use: "
+ << *U << "\n");
+ break;
+ }
+ }
+ }
+
+ if (IsMatchingPop && !HasAutoreleaseInScope && SafeToOptimize) {
+ LLVM_DEBUG(dbgs() << "Eliminating empty autorelease pool pair: "
+ << *PendingPush << " and " << *Pop << "\n");
+
+ // Store the pair for careful deletion later
+ EmptyPoolPairs.push_back({PendingPush, Pop});
+
+ ++NumNoops;
+ }
+
+ // Pop the stack - remove this pool scope
+ PoolStack.pop_back();
+ }
+ break;
+ }
+ case ARCInstKind::CallOrUser:
+ case ARCInstKind::Call:
+ case ARCInstKind::Autorelease:
+ case ARCInstKind::AutoreleaseRV: {
+ // Track that we have autorelease calls in the current pool scope
+ if (!PoolStack.empty()) {
+ PoolStack.back().second = true; // Set has_autorelease_in_scope = true
+ LLVM_DEBUG(
+ dbgs()
+ << "Found autorelease or potential autorelease in pool scope: "
+ << Inst << "\n");
+ }
+ break;
+ }
+
+ // Enumerate all remaining ARCInstKind cases explicitly
+ case ARCInstKind::Retain:
+ case ARCInstKind::RetainRV:
+ case ARCInstKind::UnsafeClaimRV:
+ case ARCInstKind::RetainBlock:
+ case ARCInstKind::Release:
+ case ARCInstKind::NoopCast:
+ case ARCInstKind::FusedRetainAutorelease:
+ case ARCInstKind::FusedRetainAutoreleaseRV:
+ case ARCInstKind::LoadWeakRetained:
+ case ARCInstKind::StoreWeak:
+ case ARCInstKind::InitWeak:
+ case ARCInstKind::LoadWeak:
+ case ARCInstKind::MoveWeak:
+ case ARCInstKind::CopyWeak:
+ case ARCInstKind::DestroyWeak:
+ case ARCInstKind::StoreStrong:
+ case ARCInstKind::IntrinsicUser:
+ case ARCInstKind::User:
+ case ARCInstKind::None:
+ // These instruction kinds don't affect autorelease pool optimization
+ break;
+ }
+ }
+ }
+
+ // Handle empty pool pairs carefully to avoid use-after-delete
+ SmallVector<CallInst *, 8> DeadInsts;
+ for (auto &Pair : EmptyPoolPairs) {
+ CallInst *Push = Pair.first;
+ CallInst *Pop = Pair.second;
+
+ // Replace all uses of push with poison before deletion
+ Push->replaceAllUsesWith(PoisonValue::get(Push->getType()));
+
+ LLVM_DEBUG(dbgs() << "Erasing empty pool pair: " << *Push << " and " << *Pop
+ << "\n");
+ DeadInsts.push_back(Pop);
+ DeadInsts.push_back(Push);
+ }
+
+ // Remove the pairs - set Changed = true when we actually delete instructions
+ if (!DeadInsts.empty()) {
+ Changed = true;
+ for (CallInst *DeadInst : DeadInsts) {
+ LLVM_DEBUG(dbgs() << "Erasing dead instruction: " << *DeadInst << "\n");
+ DeadInst->eraseFromParent();
+ }
+
+ // Emit optimization remark for eliminated pool pairs
+ ORE.emit([&]() {
+ return OptimizationRemark(DEBUG_TYPE, "AutoreleasePoolElimination", &F)
+ << "eliminated " << ore::NV("NumPairs", EmptyPoolPairs.size())
+ << " empty autorelease pool pair(s)";
+ });
+ }
+ }
+
/// @}
///
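For orientation, here is a minimal sketch (not part of the commit) of the pattern the new OptimizeAutoreleasePools step targets: an autorelease-pool push immediately followed by its matching pop with nothing autoreleased in between, which is roughly what an empty @autoreleasepool {} block lowers to under ARC. The extern declarations and the function name emptyPool below are illustrative assumptions, not code from the patch.

// Sketch only: real code writes @autoreleasepool {} and lets Clang emit the
// push/pop calls; the declarations below just make the pattern explicit.
extern "C" void *objc_autoreleasePoolPush(void);
extern "C" void objc_autoreleasePoolPop(void *token);

void emptyPool() {
  void *Token = objc_autoreleasePoolPush();
  // Nothing is autoreleased before the matching pop, so the push/pop pair is a
  // no-op and the new optimization can erase both calls.
  objc_autoreleasePoolPop(Token);
}

When such a pair is erased, the change also reports it through the OptimizationRemarkEmitter added above, so the elimination should be visible through the usual pass-remark reporting options.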