Path: blob/main/contrib/llvm-project/llvm/lib/Transforms/ObjCARC/ObjCARCContract.cpp
//===- ObjCARCContract.cpp - ObjC ARC Optimization ------------------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
/// \file
/// This file defines late ObjC ARC optimizations. ARC stands for Automatic
/// Reference Counting and is a system for managing reference counts for objects
/// in Objective C.
///
/// This specific file mainly deals with ``contracting'' multiple lower level
/// operations into singular higher level operations through pattern matching.
///
/// WARNING: This file knows about certain library functions. It recognizes them
/// by name, and hardwires knowledge of their semantics.
///
/// WARNING: This file knows about how certain Objective-C library functions are
/// used. Naive LLVM IR transformations which would otherwise be
/// behavior-preserving may break these assumptions.
///
//===----------------------------------------------------------------------===//

// TODO: ObjCARCContract could insert PHI nodes when uses aren't
// dominated by single calls.

#include "ARCRuntimeEntryPoints.h"
#include "DependencyAnalysis.h"
#include "ObjCARC.h"
#include "ProvenanceAnalysis.h"
#include "llvm/ADT/Statistic.h"
#include "llvm/Analysis/AliasAnalysis.h"
#include "llvm/Analysis/ObjCARCUtil.h"
#include "llvm/IR/Dominators.h"
#include "llvm/IR/EHPersonalities.h"
#include "llvm/IR/InlineAsm.h"
#include "llvm/IR/InstIterator.h"
#include "llvm/IR/Operator.h"
#include "llvm/IR/PassManager.h"
#include "llvm/InitializePasses.h"
#include "llvm/Support/CommandLine.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/raw_ostream.h"
#include "llvm/Transforms/ObjCARC.h"

using namespace llvm;
using namespace llvm::objcarc;

#define DEBUG_TYPE "objc-arc-contract"

STATISTIC(NumPeeps, "Number of calls peephole-optimized");
STATISTIC(NumStoreStrongs, "Number objc_storeStrong calls formed");

//===----------------------------------------------------------------------===//
//                                Declarations
//===----------------------------------------------------------------------===//

namespace {
/// Late ARC optimizations
///
/// These change the IR in a way that makes it difficult to be analyzed by
/// ObjCARCOpt, so it's run late.

class ObjCARCContract {
  bool Changed;
  bool CFGChanged;
  AAResults *AA;
  DominatorTree *DT;
  ProvenanceAnalysis PA;
  ARCRuntimeEntryPoints EP;
  BundledRetainClaimRVs *BundledInsts = nullptr;

  /// The inline asm string to insert between calls and RetainRV calls to make
  /// the optimization work on targets which need it.
  const MDString *RVInstMarker;

  /// The set of inserted objc_storeStrong calls. If at the end of walking the
  /// function we have found no alloca instructions, these calls can be marked
  /// "tail".
  SmallPtrSet<CallInst *, 8> StoreStrongCalls;

  /// Returns true if we eliminated Inst.
  bool tryToPeepholeInstruction(
      Function &F, Instruction *Inst, inst_iterator &Iter,
      bool &TailOkForStoreStrong,
      const DenseMap<BasicBlock *, ColorVector> &BlockColors);

  bool optimizeRetainCall(Function &F, Instruction *Retain);

  bool contractAutorelease(Function &F, Instruction *Autorelease,
                           ARCInstKind Class);

  void tryToContractReleaseIntoStoreStrong(
      Instruction *Release, inst_iterator &Iter,
      const DenseMap<BasicBlock *, ColorVector> &BlockColors);

public:
  bool init(Module &M);
  bool run(Function &F, AAResults *AA, DominatorTree *DT);
  bool hasCFGChanged() const { return CFGChanged; }
};

class ObjCARCContractLegacyPass : public FunctionPass {
public:
  void getAnalysisUsage(AnalysisUsage &AU) const override;
  bool runOnFunction(Function &F) override;

  static char ID;
  ObjCARCContractLegacyPass() : FunctionPass(ID) {
    initializeObjCARCContractLegacyPassPass(*PassRegistry::getPassRegistry());
  }
};
}

//===----------------------------------------------------------------------===//
//                                Implementation
//===----------------------------------------------------------------------===//

/// Turn objc_retain into objc_retainAutoreleasedReturnValue if the operand is a
/// return value. We do this late so we do not disrupt the dataflow analysis in
/// ObjCARCOpt.
bool ObjCARCContract::optimizeRetainCall(Function &F, Instruction *Retain) {
  const auto *Call = dyn_cast<CallBase>(GetArgRCIdentityRoot(Retain));
  if (!Call)
    return false;
  if (Call->getParent() != Retain->getParent())
    return false;

  // Check that the call is next to the retain.
  BasicBlock::const_iterator I = ++Call->getIterator();
  while (IsNoopInstruction(&*I))
    ++I;
  if (&*I != Retain)
    return false;

  // Turn it to an objc_retainAutoreleasedReturnValue.
  Changed = true;
  ++NumPeeps;

  LLVM_DEBUG(
      dbgs() << "Transforming objc_retain => "
                "objc_retainAutoreleasedReturnValue since the operand is a "
                "return value.\nOld: "
             << *Retain << "\n");

  // We do not have to worry about tail calls/does not throw since
  // retain/retainRV have the same properties.
  Function *Decl = EP.get(ARCRuntimeEntryPointKind::RetainRV);
  cast<CallInst>(Retain)->setCalledFunction(Decl);

  LLVM_DEBUG(dbgs() << "New: " << *Retain << "\n");
  return true;
}
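
// Illustrative sketch (not part of the original source): the peephole above
// rewrites a retain whose operand is the immediately preceding call's return
// value, e.g.
//
//   %call = call i8* @foo()
//   %0 = call i8* @objc_retain(i8* %call)
//
// into a call to the return-value form of the entry point:
//
//   %0 = call i8* @objc_retainAutoreleasedReturnValue(i8* %call)
//
// The names and pointer types follow the conventions already used in the
// objc_storeStrong comment later in this file.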

/// Merge an autorelease with a retain into a fused call.
bool ObjCARCContract::contractAutorelease(Function &F, Instruction *Autorelease,
                                          ARCInstKind Class) {
  const Value *Arg = GetArgRCIdentityRoot(Autorelease);

  // Check that there are no instructions between the retain and the autorelease
  // (such as an autorelease_pop) which may change the count.
  DependenceKind DK = Class == ARCInstKind::AutoreleaseRV
                          ? RetainAutoreleaseRVDep
                          : RetainAutoreleaseDep;
  auto *Retain = dyn_cast_or_null<CallInst>(
      findSingleDependency(DK, Arg, Autorelease->getParent(), Autorelease, PA));

  if (!Retain || GetBasicARCInstKind(Retain) != ARCInstKind::Retain ||
      GetArgRCIdentityRoot(Retain) != Arg)
    return false;

  Changed = true;
  ++NumPeeps;

  LLVM_DEBUG(dbgs() << "    Fusing retain/autorelease!\n"
                       "        Autorelease:"
                    << *Autorelease
                    << "\n"
                       "        Retain: "
                    << *Retain << "\n");

  Function *Decl = EP.get(Class == ARCInstKind::AutoreleaseRV
                              ? ARCRuntimeEntryPointKind::RetainAutoreleaseRV
                              : ARCRuntimeEntryPointKind::RetainAutorelease);
  Retain->setCalledFunction(Decl);

  LLVM_DEBUG(dbgs() << "        New RetainAutorelease: " << *Retain << "\n");

  EraseInstruction(Autorelease);
  return true;
}
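
// Illustrative sketch (not part of the original source): given a retain and a
// later autorelease of the same RC identity root, with nothing in between that
// can change the reference count, e.g.
//
//   %0 = call i8* @objc_retain(i8* %p)
//   ...
//   call i8* @objc_autorelease(i8* %p)
//
// the retain's callee is rewritten to the fused entry point and the autorelease
// is erased:
//
//   %0 = call i8* @objc_retainAutorelease(i8* %p)
//
// (objc_retainAutoreleaseReturnValue is used instead when the second call was
// an autoreleaseRV.)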

static StoreInst *findSafeStoreForStoreStrongContraction(LoadInst *Load,
                                                         Instruction *Release,
                                                         ProvenanceAnalysis &PA,
                                                         AAResults *AA) {
  StoreInst *Store = nullptr;
  bool SawRelease = false;

  // Get the location associated with Load.
  MemoryLocation Loc = MemoryLocation::get(Load);
  auto *LocPtr = Loc.Ptr->stripPointerCasts();

  // Walk down to find the store and the release, which may be in either order.
  for (auto I = std::next(BasicBlock::iterator(Load)),
            E = Load->getParent()->end();
       I != E; ++I) {
    // If we found the store we were looking for and saw the release,
    // break. There is no more work to be done.
    if (Store && SawRelease)
      break;

    // Now we know that we have not seen either the store or the release. If I
    // is the release, mark that we saw the release and continue.
    Instruction *Inst = &*I;
    if (Inst == Release) {
      SawRelease = true;
      continue;
    }

    // Otherwise, we check if Inst is a "good" store. Grab the instruction class
    // of Inst.
    ARCInstKind Class = GetBasicARCInstKind(Inst);

    // If we have seen the store, but not the release...
    if (Store) {
      // We need to make sure that it is safe to move the release from its
      // current position to the store. This implies proving that any
      // instruction in between Store and the Release conservatively can not use
      // the RCIdentityRoot of Release. If we can prove that Inst can be
      // ignored, continue...
      if (!CanUse(Inst, Load, PA, Class)) {
        continue;
      }

      // Otherwise, be conservative and return nullptr.
      return nullptr;
    }

    // Ok, now we know we have not seen a store yet.

    // If Inst is a retain, we don't care about it as it doesn't prevent moving
    // the load to the store.
    //
    // TODO: This is one area where the optimization could be made more
    // aggressive.
    if (IsRetain(Class))
      continue;

    // See if Inst can write to our load location; if it cannot, just ignore
    // the instruction.
    if (!isModSet(AA->getModRefInfo(Inst, Loc)))
      continue;

    Store = dyn_cast<StoreInst>(Inst);

    // If Inst can, then check if Inst is a simple store. If Inst is not a
    // store, or is a store that is not simple, then we have something we do
    // not understand writing to this memory, implying we can not move the load
    // over the write to any subsequent store that we may find.
    if (!Store || !Store->isSimple())
      return nullptr;

    // Then make sure that the pointer we are storing to is Ptr. If so, we
    // found our Store!
    if (Store->getPointerOperand()->stripPointerCasts() == LocPtr)
      continue;

    // Otherwise, we have an unknown store to some other ptr that clobbers
    // Loc.Ptr. Bail!
    return nullptr;
  }

  // If we did not find the store or did not see the release, fail.
  if (!Store || !SawRelease)
    return nullptr;

  // We succeeded!
  return Store;
}
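
// Illustrative sketch (not part of the original source): the walk above is
// looking for a block shaped roughly like
//
//   %old_value = load i8** %old_ptr
//   ...                                       ; nothing that clobbers %old_ptr
//   call void @objc_release(i8* %old_value)   ; the Release (release/store may
//   store i8* %new_value, i8** %old_ptr       ;  appear in either order)
//
// and returns the store; anything it cannot reason about makes it bail with
// nullptr.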

static Instruction *
findRetainForStoreStrongContraction(Value *New, StoreInst *Store,
                                    Instruction *Release,
                                    ProvenanceAnalysis &PA) {
  // Walk up from the Store to find the retain.
  BasicBlock::iterator I = Store->getIterator();
  BasicBlock::iterator Begin = Store->getParent()->begin();
  while (I != Begin && GetBasicARCInstKind(&*I) != ARCInstKind::Retain) {
    Instruction *Inst = &*I;

    // It is only safe to move the retain to the store if we can prove
    // conservatively that nothing besides the release can decrement reference
    // counts in between the retain and the store.
    if (CanDecrementRefCount(Inst, New, PA) && Inst != Release)
      return nullptr;
    --I;
  }
  Instruction *Retain = &*I;
  if (GetBasicARCInstKind(Retain) != ARCInstKind::Retain)
    return nullptr;
  if (GetArgRCIdentityRoot(Retain) != New)
    return nullptr;
  return Retain;
}

/// Attempt to merge an objc_release with a store, load, and objc_retain to form
/// an objc_storeStrong. An objc_storeStrong:
///
///   objc_storeStrong(i8** %old_ptr, i8* new_value)
///
/// is equivalent to the following IR sequence:
///
///   ; Load old value.
///   %old_value = load i8** %old_ptr               (1)
///
///   ; Increment the new value and then release the old value. This must occur
///   ; in order in case old_value releases new_value in its destructor causing
///   ; us to potentially have a dangling ptr.
///   tail call i8* @objc_retain(i8* %new_value)    (2)
///   tail call void @objc_release(i8* %old_value)  (3)
///
///   ; Store the new_value into old_ptr
///   store i8* %new_value, i8** %old_ptr           (4)
///
/// The safety of this optimization is based around the following
/// considerations:
///
///  1. We are forming the store strong at the store. Thus to perform this
///     optimization it must be safe to move the retain, load, and release to
///     (4).
///  2. We need to make sure that any re-orderings of (1), (2), (3), (4) are
///     safe.
void ObjCARCContract::tryToContractReleaseIntoStoreStrong(
    Instruction *Release, inst_iterator &Iter,
    const DenseMap<BasicBlock *, ColorVector> &BlockColors) {
  // See if we are releasing something that we just loaded.
  auto *Load = dyn_cast<LoadInst>(GetArgRCIdentityRoot(Release));
  if (!Load || !Load->isSimple())
    return;

  // For now, require everything to be in one basic block.
  BasicBlock *BB = Release->getParent();
  if (Load->getParent() != BB)
    return;

  // First scan down the BB from Load, looking for a store of Load's
  // RCIdentityRoot.
  StoreInst *Store =
      findSafeStoreForStoreStrongContraction(Load, Release, PA, AA);
  // If we fail, bail.
  if (!Store)
    return;

  // Then find what new_value's RCIdentityRoot is.
  Value *New = GetRCIdentityRoot(Store->getValueOperand());

  // Then walk up the BB and look for a retain on New without any intervening
  // instructions which conservatively might decrement ref counts.
  Instruction *Retain =
      findRetainForStoreStrongContraction(New, Store, Release, PA);

  // If we fail, bail.
  if (!Retain)
    return;

  Changed = true;
  ++NumStoreStrongs;

  LLVM_DEBUG(
      llvm::dbgs() << "    Contracting retain, release into objc_storeStrong.\n"
                   << "        Old:\n"
                   << "            Store:   " << *Store << "\n"
                   << "            Release: " << *Release << "\n"
                   << "            Retain:  " << *Retain << "\n"
                   << "            Load:    " << *Load << "\n");

  LLVMContext &C = Release->getContext();
  Type *I8X = PointerType::getUnqual(Type::getInt8Ty(C));
  Type *I8XX = PointerType::getUnqual(I8X);

  Value *Args[] = { Load->getPointerOperand(), New };
  if (Args[0]->getType() != I8XX)
    Args[0] = new BitCastInst(Args[0], I8XX, "", Store->getIterator());
  if (Args[1]->getType() != I8X)
    Args[1] = new BitCastInst(Args[1], I8X, "", Store->getIterator());
  Function *Decl = EP.get(ARCRuntimeEntryPointKind::StoreStrong);
  CallInst *StoreStrong = objcarc::createCallInstWithColors(
      Decl, Args, "", Store->getIterator(), BlockColors);
  StoreStrong->setDoesNotThrow();
  StoreStrong->setDebugLoc(Store->getDebugLoc());

  // We can't set the tail flag yet, because we haven't yet determined
  // whether there are any escaping allocas. Remember this call, so that
  // we can set the tail flag once we know it's safe.
  StoreStrongCalls.insert(StoreStrong);

  LLVM_DEBUG(llvm::dbgs() << "        New Store Strong: " << *StoreStrong
                          << "\n");

  if (&*Iter == Retain) ++Iter;
  if (&*Iter == Store) ++Iter;
  Store->eraseFromParent();
  Release->eraseFromParent();
  EraseInstruction(Retain);
  if (Load->use_empty())
    Load->eraseFromParent();
}
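
// Illustrative sketch (not part of the original source): when the contraction
// above succeeds, the load/retain/release/store sequence shown in the function
// comment collapses into a single runtime call emitted at the store's position,
//
//   call void @objc_storeStrong(i8** %old_ptr, i8* %new_value)
//
// and the original retain, release, and store are erased (the load too, once it
// has no remaining uses).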

bool ObjCARCContract::tryToPeepholeInstruction(
    Function &F, Instruction *Inst, inst_iterator &Iter,
    bool &TailOkForStoreStrongs,
    const DenseMap<BasicBlock *, ColorVector> &BlockColors) {
  // Only these library routines return their argument. In particular,
  // objc_retainBlock does not necessarily return its argument.
  ARCInstKind Class = GetBasicARCInstKind(Inst);
  switch (Class) {
  case ARCInstKind::FusedRetainAutorelease:
  case ARCInstKind::FusedRetainAutoreleaseRV:
    return false;
  case ARCInstKind::Autorelease:
  case ARCInstKind::AutoreleaseRV:
    return contractAutorelease(F, Inst, Class);
  case ARCInstKind::Retain:
    // Attempt to convert retains to retainrvs if they are next to function
    // calls.
    if (!optimizeRetainCall(F, Inst))
      return false;
    // If we succeed in our optimization, fall through.
    [[fallthrough]];
  case ARCInstKind::RetainRV:
  case ARCInstKind::UnsafeClaimRV: {
    // Return true if this is a bundled retainRV/claimRV call, which is always
    // redundant with the attachedcall in the bundle, and is going to be erased
    // at the end of this pass. This avoids undoing objc-arc-expand and
    // replacing uses of the retainRV/claimRV call's argument with its result.
    if (BundledInsts->contains(Inst))
      return true;

    // If this isn't a bundled call, and the target doesn't need a special
    // inline-asm marker, we're done: return now, and undo objc-arc-expand.
    if (!RVInstMarker)
      return false;

    // The target needs a special inline-asm marker. Insert it.

    BasicBlock::iterator BBI = Inst->getIterator();
    BasicBlock *InstParent = Inst->getParent();

    // Step up to see if the call immediately precedes the RV call.
    // If it's an invoke, we have to cross a block boundary. And we have
    // to carefully dodge no-op instructions.
    do {
      if (BBI == InstParent->begin()) {
        BasicBlock *Pred = InstParent->getSinglePredecessor();
        if (!Pred)
          goto decline_rv_optimization;
        BBI = Pred->getTerminator()->getIterator();
        break;
      }
      --BBI;
    } while (IsNoopInstruction(&*BBI));

    if (GetRCIdentityRoot(&*BBI) == GetArgRCIdentityRoot(Inst)) {
      LLVM_DEBUG(dbgs() << "Adding inline asm marker for the return value "
                           "optimization.\n");
      Changed = true;
      InlineAsm *IA =
          InlineAsm::get(FunctionType::get(Type::getVoidTy(Inst->getContext()),
                                           /*isVarArg=*/false),
                         RVInstMarker->getString(),
                         /*Constraints=*/"", /*hasSideEffects=*/true);

      objcarc::createCallInstWithColors(IA, std::nullopt, "",
                                        Inst->getIterator(), BlockColors);
    }
  decline_rv_optimization:
    return false;
  }
  case ARCInstKind::InitWeak: {
    // objc_initWeak(p, null) => *p = null
    CallInst *CI = cast<CallInst>(Inst);
    if (IsNullOrUndef(CI->getArgOperand(1))) {
      Value *Null = ConstantPointerNull::get(cast<PointerType>(CI->getType()));
      Changed = true;
      new StoreInst(Null, CI->getArgOperand(0), CI->getIterator());

      LLVM_DEBUG(dbgs() << "OBJCARCContract: Old = " << *CI << "\n"
                        << "                 New = " << *Null << "\n");

      CI->replaceAllUsesWith(Null);
      CI->eraseFromParent();
    }
    return true;
  }
  case ARCInstKind::Release:
    // Try to form an objc store strong from our release. If we fail, there is
    // nothing further to do below, so continue.
    tryToContractReleaseIntoStoreStrong(Inst, Iter, BlockColors);
    return true;
  case ARCInstKind::User:
    // Be conservative if the function has any alloca instructions.
    // Technically we only care about escaping alloca instructions,
    // but this is sufficient to handle some interesting cases.
    if (isa<AllocaInst>(Inst))
      TailOkForStoreStrongs = false;
    return true;
  case ARCInstKind::IntrinsicUser:
    // Remove calls to @llvm.objc.clang.arc.use(...).
    Changed = true;
    Inst->eraseFromParent();
    return true;
  default:
    if (auto *CI = dyn_cast<CallInst>(Inst))
      if (CI->getIntrinsicID() == Intrinsic::objc_clang_arc_noop_use) {
        // Remove calls to @llvm.objc.clang.arc.noop.use(...).
        Changed = true;
        CI->eraseFromParent();
      }
    return true;
  }
}
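
// Illustrative sketch (not part of the original source): RVInstMarker is read
// from the "clang.arc.retainAutoreleasedReturnValueMarker" module flag (see
// init() below); the marker string itself is target-specific. When the RetainRV
// case above fires, the marker ends up as side-effecting inline asm between the
// call and the retainRV, roughly:
//
//   %call = call i8* @foo()
//   call void asm sideeffect "<marker>", ""()
//   %0 = call i8* @objc_retainAutoreleasedReturnValue(i8* %call)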

//===----------------------------------------------------------------------===//
//                              Top Level Driver
//===----------------------------------------------------------------------===//

bool ObjCARCContract::init(Module &M) {
  EP.init(&M);

  // Initialize RVInstMarker.
  RVInstMarker = getRVInstMarker(M);

  return false;
}

bool ObjCARCContract::run(Function &F, AAResults *A, DominatorTree *D) {
  if (!EnableARCOpts)
    return false;

  Changed = CFGChanged = false;
  AA = A;
  DT = D;
  PA.setAA(A);
  BundledRetainClaimRVs BRV(/*ContractPass=*/true);
  BundledInsts = &BRV;

  std::pair<bool, bool> R = BundledInsts->insertAfterInvokes(F, DT);
  Changed |= R.first;
  CFGChanged |= R.second;

  DenseMap<BasicBlock *, ColorVector> BlockColors;
  if (F.hasPersonalityFn() &&
      isScopedEHPersonality(classifyEHPersonality(F.getPersonalityFn())))
    BlockColors = colorEHFunclets(F);

  LLVM_DEBUG(llvm::dbgs() << "**** ObjCARC Contract ****\n");

  // Track whether it's ok to mark objc_storeStrong calls with the "tail"
  // keyword. Be conservative if the function has variadic arguments.
  // It seems that functions which "return twice" are also unsafe for the
  // "tail" argument, because they are setjmp, which could need to
  // return to an earlier stack state.
  bool TailOkForStoreStrongs =
      !F.isVarArg() && !F.callsFunctionThatReturnsTwice();

  // For ObjC library calls which return their argument, replace uses of the
  // argument with uses of the call return value, if it dominates the use. This
  // reduces register pressure.
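  //
  // Illustrative sketch (not part of the original source): because a call such
  // as objc_autorelease returns its argument, a use of the argument that is
  // dominated by the call can use the call's result instead, e.g.
  //
  //   %1 = call i8* @objc_autorelease(i8* %0)
  //   store i8* %0, i8** %slot        ; becomes: store i8* %1, i8** %slot
  //
  // (%slot is a made-up name.) That rewrite is what the loop below performs,
  // via ReplaceArgUses, for the library calls that tryToPeepholeInstruction
  // declines to handle.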
  for (inst_iterator I = inst_begin(&F), E = inst_end(&F); I != E;) {
    Instruction *Inst = &*I++;

    LLVM_DEBUG(dbgs() << "Visiting: " << *Inst << "\n");

    if (auto *CI = dyn_cast<CallInst>(Inst))
      if (objcarc::hasAttachedCallOpBundle(CI)) {
        BundledInsts->insertRVCallWithColors(I->getIterator(), CI, BlockColors);
        --I;
        Changed = true;
      }

    // First try to peephole Inst. If there is nothing further we can do in
    // terms of undoing objc-arc-expand, process the next inst.
    if (tryToPeepholeInstruction(F, Inst, I, TailOkForStoreStrongs,
                                 BlockColors))
      continue;

    // Otherwise, try to undo objc-arc-expand.

    // Don't use GetArgRCIdentityRoot because we don't want to look through bitcasts
    // and such; to do the replacement, the argument must have type i8*.

    // Function for replacing uses of Arg dominated by Inst.
    auto ReplaceArgUses = [Inst, this](Value *Arg) {
      // If we're compiling bugpointed code, don't get in trouble.
      if (!isa<Instruction>(Arg) && !isa<Argument>(Arg))
        return;

      // Look through the uses of the pointer.
      for (Value::use_iterator UI = Arg->use_begin(), UE = Arg->use_end();
           UI != UE; ) {
        // Increment UI now, because we may unlink its element.
        Use &U = *UI++;
        unsigned OperandNo = U.getOperandNo();

        // If the call's return value dominates a use of the call's argument
        // value, rewrite the use to use the return value. We check for
        // reachability here because an unreachable call is considered to
        // trivially dominate itself, which would lead us to rewriting its
        // argument in terms of its return value, which would lead to
        // infinite loops in GetArgRCIdentityRoot.
        if (!DT->isReachableFromEntry(U) || !DT->dominates(Inst, U))
          continue;

        Changed = true;
        Instruction *Replacement = Inst;
        Type *UseTy = U.get()->getType();
        if (PHINode *PHI = dyn_cast<PHINode>(U.getUser())) {
          // For PHI nodes, insert the bitcast in the predecessor block.
          unsigned ValNo = PHINode::getIncomingValueNumForOperand(OperandNo);
          BasicBlock *IncomingBB = PHI->getIncomingBlock(ValNo);
          if (Replacement->getType() != UseTy) {
            // A catchswitch is both a pad and a terminator, meaning a basic
            // block with a catchswitch has no insertion point. Keep going up
            // the dominator tree until we find a non-catchswitch.
            BasicBlock *InsertBB = IncomingBB;
            while (isa<CatchSwitchInst>(InsertBB->getFirstNonPHI())) {
              InsertBB = DT->getNode(InsertBB)->getIDom()->getBlock();
            }

            assert(DT->dominates(Inst, &InsertBB->back()) &&
                   "Invalid insertion point for bitcast");
            Replacement = new BitCastInst(Replacement, UseTy, "",
                                          InsertBB->back().getIterator());
          }

          // While we're here, rewrite all edges for this PHI, rather
          // than just one use at a time, to minimize the number of
          // bitcasts we emit.
          for (unsigned i = 0, e = PHI->getNumIncomingValues(); i != e; ++i)
            if (PHI->getIncomingBlock(i) == IncomingBB) {
              // Keep the UI iterator valid.
              if (UI != UE &&
                  &PHI->getOperandUse(
                      PHINode::getOperandNumForIncomingValue(i)) == &*UI)
                ++UI;
              PHI->setIncomingValue(i, Replacement);
            }
        } else {
          if (Replacement->getType() != UseTy)
            Replacement =
                new BitCastInst(Replacement, UseTy, "",
                                cast<Instruction>(U.getUser())->getIterator());
          U.set(Replacement);
        }
      }
    };

    Value *Arg = cast<CallInst>(Inst)->getArgOperand(0);
    Value *OrigArg = Arg;

    // TODO: Change this to a do-while.
    for (;;) {
      ReplaceArgUses(Arg);

      // If Arg is a no-op casted pointer, strip one level of casts and iterate.
      if (const BitCastInst *BI = dyn_cast<BitCastInst>(Arg))
        Arg = BI->getOperand(0);
      else if (isa<GEPOperator>(Arg) &&
               cast<GEPOperator>(Arg)->hasAllZeroIndices())
        Arg = cast<GEPOperator>(Arg)->getPointerOperand();
      else if (isa<GlobalAlias>(Arg) &&
               !cast<GlobalAlias>(Arg)->isInterposable())
        Arg = cast<GlobalAlias>(Arg)->getAliasee();
      else {
        // If Arg is a PHI node, get PHIs that are equivalent to it and replace
        // their uses.
        if (PHINode *PN = dyn_cast<PHINode>(Arg)) {
          SmallVector<Value *, 1> PHIList;
          getEquivalentPHIs(*PN, PHIList);
          for (Value *PHI : PHIList)
            ReplaceArgUses(PHI);
        }
        break;
      }
    }

    // Replace bitcast users of Arg that are dominated by Inst.
    SmallVector<BitCastInst *, 2> BitCastUsers;

    // Add all bitcast users of the function argument first.
    for (User *U : OrigArg->users())
      if (auto *BC = dyn_cast<BitCastInst>(U))
        BitCastUsers.push_back(BC);

    // Replace the bitcasts with the call return. Iterate until list is empty.
    while (!BitCastUsers.empty()) {
      auto *BC = BitCastUsers.pop_back_val();
      for (User *U : BC->users())
        if (auto *B = dyn_cast<BitCastInst>(U))
          BitCastUsers.push_back(B);

      ReplaceArgUses(BC);
    }
  }

  // If this function has no escaping allocas or suspicious vararg usage,
  // objc_storeStrong calls can be marked with the "tail" keyword.
  if (TailOkForStoreStrongs)
    for (CallInst *CI : StoreStrongCalls)
      CI->setTailCall();
  StoreStrongCalls.clear();

  return Changed;
}

//===----------------------------------------------------------------------===//
//                             Misc Pass Manager
//===----------------------------------------------------------------------===//

char ObjCARCContractLegacyPass::ID = 0;
INITIALIZE_PASS_BEGIN(ObjCARCContractLegacyPass, "objc-arc-contract",
                      "ObjC ARC contraction", false, false)
INITIALIZE_PASS_DEPENDENCY(AAResultsWrapperPass)
INITIALIZE_PASS_DEPENDENCY(DominatorTreeWrapperPass)
INITIALIZE_PASS_END(ObjCARCContractLegacyPass, "objc-arc-contract",
                    "ObjC ARC contraction", false, false)

void ObjCARCContractLegacyPass::getAnalysisUsage(AnalysisUsage &AU) const {
  AU.addRequired<AAResultsWrapperPass>();
  AU.addRequired<DominatorTreeWrapperPass>();
}

Pass *llvm::createObjCARCContractPass() {
  return new ObjCARCContractLegacyPass();
}

bool ObjCARCContractLegacyPass::runOnFunction(Function &F) {
  ObjCARCContract OCARCC;
  OCARCC.init(*F.getParent());
  auto *AA = &getAnalysis<AAResultsWrapperPass>().getAAResults();
  auto *DT = &getAnalysis<DominatorTreeWrapperPass>().getDomTree();
  return OCARCC.run(F, AA, DT);
}

PreservedAnalyses ObjCARCContractPass::run(Function &F,
                                           FunctionAnalysisManager &AM) {
  ObjCARCContract OCAC;
  OCAC.init(*F.getParent());

  bool Changed = OCAC.run(F, &AM.getResult<AAManager>(F),
                          &AM.getResult<DominatorTreeAnalysis>(F));
  bool CFGChanged = OCAC.hasCFGChanged();
  if (Changed) {
    PreservedAnalyses PA;
    if (!CFGChanged)
      PA.preserveSet<CFGAnalyses>();
    return PA;
  }
  return PreservedAnalyses::all();
}
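
// Usage sketch (not part of the original source): the new-pass-manager pipeline
// name for this pass is "objc-arc-contract" (the legacy pass is registered
// under the same name above), so the transformations in this file can be
// exercised in isolation with something like:
//
//   opt -passes=objc-arc-contract input.ll -S -o output.ll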