//===- ObjCARCContract.cpp - ObjC ARC Optimization ------------------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
/// \file
/// This file defines late ObjC ARC optimizations. ARC stands for Automatic
/// Reference Counting and is a system for managing reference counts for
/// objects in Objective-C.
///
/// This specific file mainly deals with ``contracting'' multiple lower level
/// operations into singular higher level operations through pattern matching.
///
/// WARNING: This file knows about certain library functions. It recognizes
/// them by name, and hardwires knowledge of their semantics.
///
/// WARNING: This file knows about how certain Objective-C library functions
/// are used. Naive LLVM IR transformations which would otherwise be
/// behavior-preserving may break these assumptions.
///
//===----------------------------------------------------------------------===//

// TODO: ObjCARCContract could insert PHI nodes when uses aren't
// dominated by single calls.

#include "ARCRuntimeEntryPoints.h"
#include "DependencyAnalysis.h"
#include "ObjCARC.h"
#include "ProvenanceAnalysis.h"
#include "llvm/ADT/Statistic.h"
#include "llvm/Analysis/AliasAnalysis.h"
#include "llvm/Analysis/ObjCARCUtil.h"
#include "llvm/IR/Dominators.h"
#include "llvm/IR/EHPersonalities.h"
#include "llvm/IR/InlineAsm.h"
#include "llvm/IR/InstIterator.h"
#include "llvm/IR/Operator.h"
#include "llvm/IR/PassManager.h"
#include "llvm/InitializePasses.h"
#include "llvm/Support/CommandLine.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/raw_ostream.h"
#include "llvm/Transforms/ObjCARC.h"

using namespace llvm;
using namespace llvm::objcarc;

#define DEBUG_TYPE "objc-arc-contract"

STATISTIC(NumPeeps, "Number of calls peephole-optimized");
STATISTIC(NumStoreStrongs, "Number of objc_storeStrong calls formed");

//===----------------------------------------------------------------------===//
//                                 Declarations
//===----------------------------------------------------------------------===//

namespace {
/// Late ARC optimizations
///
/// These change the IR in a way that makes it difficult to be analyzed by
/// ObjCARCOpt, so it's run late.

class ObjCARCContract {
  bool Changed;
  bool CFGChanged;
  AAResults *AA;
  DominatorTree *DT;
  ProvenanceAnalysis PA;
  ARCRuntimeEntryPoints EP;
  BundledRetainClaimRVs *BundledInsts = nullptr;

  /// The inline asm string to insert between calls and RetainRV calls to make
  /// the optimization work on targets which need it.
  const MDString *RVInstMarker;

  /// The set of inserted objc_storeStrong calls. If at the end of walking the
  /// function we have found no alloca instructions, these calls can be marked
  /// "tail".
  SmallPtrSet<CallInst *, 8> StoreStrongCalls;

  /// Returns true if Inst was handled by a peephole and requires no further
  /// processing, i.e. the caller need not try to undo objc-arc-expand for it.
  bool tryToPeepholeInstruction(
      Function &F, Instruction *Inst, inst_iterator &Iter,
      bool &TailOkForStoreStrongs,
      const DenseMap<BasicBlock *, ColorVector> &BlockColors);

  bool optimizeRetainCall(Function &F, Instruction *Retain);

  bool contractAutorelease(Function &F, Instruction *Autorelease,
                           ARCInstKind Class);

  void tryToContractReleaseIntoStoreStrong(
      Instruction *Release, inst_iterator &Iter,
      const DenseMap<BasicBlock *, ColorVector> &BlockColors);

public:
  bool init(Module &M);
  bool run(Function &F, AAResults *AA, DominatorTree *DT);
  bool hasCFGChanged() const { return CFGChanged; }
};

class ObjCARCContractLegacyPass : public FunctionPass {
public:
  void getAnalysisUsage(AnalysisUsage &AU) const override;
  bool runOnFunction(Function &F) override;

  static char ID;
  ObjCARCContractLegacyPass() : FunctionPass(ID) {
    initializeObjCARCContractLegacyPassPass(*PassRegistry::getPassRegistry());
  }
};
} // end anonymous namespace

//===----------------------------------------------------------------------===//
//                                Implementation
//===----------------------------------------------------------------------===//

/// Turn objc_retain into objc_retainAutoreleasedReturnValue if the operand is
/// a return value. We do this late so we do not disrupt the dataflow analysis
/// in ObjCARCOpt.
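///
/// For example (illustrative IR; value names and pointer types are made up):
///
///   %obj = call i8* @foo()
///   %0 = call i8* @llvm.objc.retain(i8* %obj)
///
/// becomes
///
///   %obj = call i8* @foo()
///   %0 = call i8* @llvm.objc.retainAutoreleasedReturnValue(i8* %obj)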
bool ObjCARCContract::optimizeRetainCall(Function &F, Instruction *Retain) {
  const auto *Call = dyn_cast<CallBase>(GetArgRCIdentityRoot(Retain));
  if (!Call)
    return false;
  if (Call->getParent() != Retain->getParent())
    return false;

  // Check that the call is next to the retain.
  BasicBlock::const_iterator I = ++Call->getIterator();
  while (IsNoopInstruction(&*I))
    ++I;
  if (&*I != Retain)
    return false;

  // Turn it to an objc_retainAutoreleasedReturnValue.
  Changed = true;
  ++NumPeeps;

  LLVM_DEBUG(
      dbgs() << "Transforming objc_retain => "
                "objc_retainAutoreleasedReturnValue since the operand is a "
                "return value.\nOld: "
             << *Retain << "\n");

  // We do not have to worry about tail calls/does not throw since
  // retain/retainRV have the same properties.
  Function *Decl = EP.get(ARCRuntimeEntryPointKind::RetainRV);
  cast<CallInst>(Retain)->setCalledFunction(Decl);

  LLVM_DEBUG(dbgs() << "New: " << *Retain << "\n");
  return true;
}

/// Merge an autorelease with a retain into a fused call.
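///
/// For example (illustrative IR; value names are made up):
///
///   %1 = call i8* @llvm.objc.retain(i8* %x)
///   ...instructions that provably do not change the reference count...
///   call i8* @llvm.objc.autorelease(i8* %1)
///
/// is fused by rewriting the retain into a single call to
/// @llvm.objc.retainAutorelease (or @llvm.objc.retainAutoreleaseReturnValue
/// for the autoreleaseRV flavor) and erasing the autorelease.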
bool ObjCARCContract::contractAutorelease(Function &F, Instruction *Autorelease,
                                          ARCInstKind Class) {
  const Value *Arg = GetArgRCIdentityRoot(Autorelease);

  // Check that there are no instructions between the retain and the
  // autorelease (such as an objc_autoreleasePoolPop call) which may change the
  // count.
  DependenceKind DK = Class == ARCInstKind::AutoreleaseRV
                          ? RetainAutoreleaseRVDep
                          : RetainAutoreleaseDep;
  auto *Retain = dyn_cast_or_null<CallInst>(
      findSingleDependency(DK, Arg, Autorelease->getParent(), Autorelease, PA));

  if (!Retain || GetBasicARCInstKind(Retain) != ARCInstKind::Retain ||
      GetArgRCIdentityRoot(Retain) != Arg)
    return false;

  Changed = true;
  ++NumPeeps;

  LLVM_DEBUG(dbgs() << "    Fusing retain/autorelease!\n"
                       "        Autorelease:"
                    << *Autorelease
                    << "\n"
                       "        Retain: "
                    << *Retain << "\n");

  Function *Decl = EP.get(Class == ARCInstKind::AutoreleaseRV
                              ? ARCRuntimeEntryPointKind::RetainAutoreleaseRV
                              : ARCRuntimeEntryPointKind::RetainAutorelease);
  Retain->setCalledFunction(Decl);

  LLVM_DEBUG(dbgs() << "        New RetainAutorelease: " << *Retain << "\n");

  EraseInstruction(Autorelease);
  return true;
}

static StoreInst *findSafeStoreForStoreStrongContraction(LoadInst *Load,
                                                         Instruction *Release,
                                                         ProvenanceAnalysis &PA,
                                                         AAResults *AA) {
  StoreInst *Store = nullptr;
  bool SawRelease = false;

  // Get the location associated with Load.
  MemoryLocation Loc = MemoryLocation::get(Load);
  auto *LocPtr = Loc.Ptr->stripPointerCasts();

  // Walk down to find the store and the release, which may be in either order.
  for (auto I = std::next(BasicBlock::iterator(Load)),
            E = Load->getParent()->end();
       I != E; ++I) {
    // If we found the store we were looking for and saw the release,
    // break. There is no more work to be done.
    if (Store && SawRelease)
      break;

    // Now we know that we have not seen either the store or the release. If I
    // is the release, mark that we saw the release and continue.
    Instruction *Inst = &*I;
    if (Inst == Release) {
      SawRelease = true;
      continue;
    }

    // Otherwise, we check if Inst is a "good" store. Grab the instruction
    // class of Inst.
    ARCInstKind Class = GetBasicARCInstKind(Inst);

    // If we have seen the store, but not the release...
    if (Store) {
      // We need to make sure that it is safe to move the release from its
      // current position to the store. This implies proving that any
      // instruction in between Store and the Release conservatively cannot use
      // the RCIdentityRoot of Release. If CanUse proves that Inst cannot use
      // it, we can safely ignore Inst and continue...
      if (!CanUse(Inst, Load, PA, Class)) {
        continue;
      }

      // Otherwise, be conservative and return nullptr.
      return nullptr;
    }

    // Ok, now we know we have not seen a store yet.

    // If Inst is a retain, we don't care about it as it doesn't prevent moving
    // the load to the store.
    //
    // TODO: This is one area where the optimization could be made more
    // aggressive.
    if (IsRetain(Class))
      continue;

    // See if Inst can write to our load location; if it cannot, just ignore
    // the instruction.
    if (!isModSet(AA->getModRefInfo(Inst, Loc)))
      continue;

    Store = dyn_cast<StoreInst>(Inst);

    // If Inst can, then check if Inst is a simple store. If Inst is not a
    // store, or is a store that is not simple, then something we do not
    // understand is writing to this memory, implying we cannot move the load
    // over the write to any subsequent store that we may find.
    if (!Store || !Store->isSimple())
      return nullptr;

    // Then make sure that the pointer we are storing to is the same pointer
    // that Load loads from (LocPtr). If so, we found our Store!
    if (Store->getPointerOperand()->stripPointerCasts() == LocPtr)
      continue;

    // Otherwise, we have an unknown store to some other ptr that clobbers
    // Loc.Ptr. Bail!
    return nullptr;
  }

  // If we did not find the store or did not see the release, fail.
  if (!Store || !SawRelease)
    return nullptr;

  // We succeeded!
  return Store;
}

static Instruction *
findRetainForStoreStrongContraction(Value *New, StoreInst *Store,
                                    Instruction *Release,
                                    ProvenanceAnalysis &PA) {
  // Walk up from the Store to find the retain.
  BasicBlock::iterator I = Store->getIterator();
  BasicBlock::iterator Begin = Store->getParent()->begin();
  while (I != Begin && GetBasicARCInstKind(&*I) != ARCInstKind::Retain) {
    Instruction *Inst = &*I;

    // It is only safe to move the retain to the store if we can prove
    // conservatively that nothing besides the release can decrement reference
    // counts in between the retain and the store.
    if (CanDecrementRefCount(Inst, New, PA) && Inst != Release)
      return nullptr;
    --I;
  }
  Instruction *Retain = &*I;
  if (GetBasicARCInstKind(Retain) != ARCInstKind::Retain)
    return nullptr;
  if (GetArgRCIdentityRoot(Retain) != New)
    return nullptr;
  return Retain;
}

/// Attempt to merge an objc_release with a store, load, and objc_retain to
/// form an objc_storeStrong. An objc_storeStrong:
///
///   objc_storeStrong(i8** %old_ptr, i8* %new_value)
///
/// is equivalent to the following IR sequence:
///
///   ; Load old value.
///   %old_value = load i8** %old_ptr               (1)
///
///   ; Increment the new value and then release the old value. This must occur
///   ; in order in case old_value releases new_value in its destructor causing
///   ; us to potentially have a dangling ptr.
///   tail call i8* @objc_retain(i8* %new_value)    (2)
///   tail call void @objc_release(i8* %old_value)  (3)
///
///   ; Store the new_value into old_ptr
///   store i8* %new_value, i8** %old_ptr           (4)
///
/// The safety of this optimization is based around the following
/// considerations:
///
///  1. We are forming the store strong at the store. Thus to perform this
///     optimization it must be safe to move the retain, load, and release to
///     (4).
///  2. We need to make sure that any re-orderings of (1), (2), (3), (4) are
///     safe.
void ObjCARCContract::tryToContractReleaseIntoStoreStrong(
    Instruction *Release, inst_iterator &Iter,
    const DenseMap<BasicBlock *, ColorVector> &BlockColors) {
  // See if we are releasing something that we just loaded.
  auto *Load = dyn_cast<LoadInst>(GetArgRCIdentityRoot(Release));
  if (!Load || !Load->isSimple())
    return;

  // For now, require everything to be in one basic block.
  BasicBlock *BB = Release->getParent();
  if (Load->getParent() != BB)
    return;

  // First scan down the BB from Load, looking for a store to the location
  // that Load loads from.
  StoreInst *Store =
      findSafeStoreForStoreStrongContraction(Load, Release, PA, AA);
  // If we fail, bail.
  if (!Store)
    return;

  // Then find what new_value's RCIdentity Root is.
  Value *New = GetRCIdentityRoot(Store->getValueOperand());

  // Then walk up the BB and look for a retain on New without any intervening
  // instructions which conservatively might decrement ref counts.
  Instruction *Retain =
      findRetainForStoreStrongContraction(New, Store, Release, PA);

  // If we fail, bail.
  if (!Retain)
    return;

  Changed = true;
  ++NumStoreStrongs;

  LLVM_DEBUG(
      llvm::dbgs() << "    Contracting retain, release into objc_storeStrong.\n"
                   << "        Old:\n"
                   << "            Store:   " << *Store << "\n"
                   << "            Release: " << *Release << "\n"
                   << "            Retain:  " << *Retain << "\n"
                   << "            Load:    " << *Load << "\n");

  LLVMContext &C = Release->getContext();
  Type *I8X = PointerType::getUnqual(Type::getInt8Ty(C));
  Type *I8XX = PointerType::getUnqual(I8X);

  Value *Args[] = { Load->getPointerOperand(), New };
  if (Args[0]->getType() != I8XX)
    Args[0] = new BitCastInst(Args[0], I8XX, "", Store->getIterator());
  if (Args[1]->getType() != I8X)
    Args[1] = new BitCastInst(Args[1], I8X, "", Store->getIterator());
  Function *Decl = EP.get(ARCRuntimeEntryPointKind::StoreStrong);
  CallInst *StoreStrong = objcarc::createCallInstWithColors(
      Decl, Args, "", Store->getIterator(), BlockColors);
  StoreStrong->setDoesNotThrow();
  StoreStrong->setDebugLoc(Store->getDebugLoc());

  // We can't set the tail flag yet, because we haven't yet determined
  // whether there are any escaping allocas. Remember this call, so that
  // we can set the tail flag once we know it's safe.
  StoreStrongCalls.insert(StoreStrong);

  LLVM_DEBUG(llvm::dbgs() << "        New Store Strong: " << *StoreStrong
                          << "\n");

  if (&*Iter == Retain) ++Iter;
  if (&*Iter == Store) ++Iter;
  Store->eraseFromParent();
  Release->eraseFromParent();
  EraseInstruction(Retain);
  if (Load->use_empty())
    Load->eraseFromParent();
}

bool ObjCARCContract::tryToPeepholeInstruction(
    Function &F, Instruction *Inst, inst_iterator &Iter,
    bool &TailOkForStoreStrongs,
    const DenseMap<BasicBlock *, ColorVector> &BlockColors) {
  // Only these library routines return their argument. In particular,
  // objc_retainBlock does not necessarily return its argument.
  ARCInstKind Class = GetBasicARCInstKind(Inst);
  switch (Class) {
  case ARCInstKind::FusedRetainAutorelease:
  case ARCInstKind::FusedRetainAutoreleaseRV:
    return false;
  case ARCInstKind::Autorelease:
  case ARCInstKind::AutoreleaseRV:
    return contractAutorelease(F, Inst, Class);
  case ARCInstKind::Retain:
    // Attempt to convert retains to retainrvs if they are next to function
    // calls.
    if (!optimizeRetainCall(F, Inst))
      return false;
    // If we succeed in our optimization, fall through.
    [[fallthrough]];
  case ARCInstKind::RetainRV:
  case ARCInstKind::UnsafeClaimRV: {
    // Return true if this is a bundled retainRV/claimRV call, which is always
    // redundant with the attachedcall in the bundle, and is going to be erased
    // at the end of this pass. This avoids undoing objc-arc-expand and
    // replacing uses of the retainRV/claimRV call's argument with its result.
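    //
    // A bundled call looks roughly like this (illustrative IR; the exact form
    // of the bundle operand has varied across LLVM versions):
    //
    //   %r = call ptr @foo() [ "clang.arc.attachedcall"(
    //            ptr @llvm.objc.retainAutoreleasedReturnValue) ]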
    if (BundledInsts->contains(Inst))
      return true;

    // If this isn't a bundled call, and the target doesn't need a special
    // inline-asm marker, we're done: return now, and undo objc-arc-expand.
    if (!RVInstMarker)
      return false;

    // The target needs a special inline-asm marker. Insert it.
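    // For example, on arm64 the marker clang has historically emitted is the
    // no-op instruction "mov fp, fp", communicated via the
    // "clang.arc.retainAutoreleasedReturnValueMarker" module flag; the exact
    // string is target- and toolchain-dependent.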

    BasicBlock::iterator BBI = Inst->getIterator();
    BasicBlock *InstParent = Inst->getParent();

    // Step up to see if the call immediately precedes the RV call.
    // If it's an invoke, we have to cross a block boundary. And we have
    // to carefully dodge no-op instructions.
    do {
      if (BBI == InstParent->begin()) {
        BasicBlock *Pred = InstParent->getSinglePredecessor();
        if (!Pred)
          goto decline_rv_optimization;
        BBI = Pred->getTerminator()->getIterator();
        break;
      }
      --BBI;
    } while (IsNoopInstruction(&*BBI));

    if (GetRCIdentityRoot(&*BBI) == GetArgRCIdentityRoot(Inst)) {
      LLVM_DEBUG(dbgs() << "Adding inline asm marker for the return value "
                           "optimization.\n");
      Changed = true;
      InlineAsm *IA =
          InlineAsm::get(FunctionType::get(Type::getVoidTy(Inst->getContext()),
                                           /*isVarArg=*/false),
                         RVInstMarker->getString(),
                         /*Constraints=*/"", /*hasSideEffects=*/true);

      objcarc::createCallInstWithColors(IA, std::nullopt, "",
                                        Inst->getIterator(), BlockColors);
    }
  decline_rv_optimization:
    return false;
  }
  case ARCInstKind::InitWeak: {
    // objc_initWeak(p, null) => *p = null
    CallInst *CI = cast<CallInst>(Inst);
    if (IsNullOrUndef(CI->getArgOperand(1))) {
      Value *Null = ConstantPointerNull::get(cast<PointerType>(CI->getType()));
      Changed = true;
      new StoreInst(Null, CI->getArgOperand(0), CI->getIterator());

      LLVM_DEBUG(dbgs() << "OBJCARCContract: Old = " << *CI << "\n"
                        << "                 New = " << *Null << "\n");

      CI->replaceAllUsesWith(Null);
      CI->eraseFromParent();
    }
    return true;
  }
  case ARCInstKind::Release:
    // Try to form an objc store strong from our release. If we fail, there is
    // nothing further to do below, so continue.
    tryToContractReleaseIntoStoreStrong(Inst, Iter, BlockColors);
    return true;
  case ARCInstKind::User:
    // Be conservative if the function has any alloca instructions.
    // Technically we only care about escaping alloca instructions,
    // but this is sufficient to handle some interesting cases.
    if (isa<AllocaInst>(Inst))
      TailOkForStoreStrongs = false;
    return true;
  case ARCInstKind::IntrinsicUser:
    // Remove calls to @llvm.objc.clang.arc.use(...).
    Changed = true;
    Inst->eraseFromParent();
    return true;
  default:
    if (auto *CI = dyn_cast<CallInst>(Inst))
      if (CI->getIntrinsicID() == Intrinsic::objc_clang_arc_noop_use) {
        // Remove calls to @llvm.objc.clang.arc.noop.use(...).
        Changed = true;
        CI->eraseFromParent();
      }
    return true;
  }
}

//===----------------------------------------------------------------------===//
//                              Top Level Driver
//===----------------------------------------------------------------------===//

bool ObjCARCContract::init(Module &M) {
  EP.init(&M);

  // Initialize RVInstMarker.
  RVInstMarker = getRVInstMarker(M);

  return false;
}

bool ObjCARCContract::run(Function &F, AAResults *A, DominatorTree *D) {
  if (!EnableARCOpts)
    return false;

  Changed = CFGChanged = false;
  AA = A;
  DT = D;
  PA.setAA(A);
  BundledRetainClaimRVs BRV(/*ContractPass=*/true);
  BundledInsts = &BRV;

  std::pair<bool, bool> R = BundledInsts->insertAfterInvokes(F, DT);
  Changed |= R.first;
  CFGChanged |= R.second;

  DenseMap<BasicBlock *, ColorVector> BlockColors;
  if (F.hasPersonalityFn() &&
      isScopedEHPersonality(classifyEHPersonality(F.getPersonalityFn())))
    BlockColors = colorEHFunclets(F);

  LLVM_DEBUG(llvm::dbgs() << "**** ObjCARC Contract ****\n");

  // Track whether it's ok to mark objc_storeStrong calls with the "tail"
  // keyword. Be conservative if the function has variadic arguments.
  // Functions which "return twice" (e.g. those that call setjmp) are also
  // unsafe for the "tail" keyword, because control could need to return to
  // an earlier stack state.
  bool TailOkForStoreStrongs =
      !F.isVarArg() && !F.callsFunctionThatReturnsTwice();

  // For ObjC library calls which return their argument, replace uses of the
  // argument with uses of the call return value, if it dominates the use. This
  // reduces register pressure.
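  //
  // For example (illustrative IR; value names are made up), given
  //
  //   %1 = call i8* @llvm.objc.retain(i8* %0)
  //   store i8* %0, i8** %slot
  //
  // the store is rewritten to use %1 instead of %0: objc_retain returns its
  // argument, so only %1 needs to stay live after the call.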
  for (inst_iterator I = inst_begin(&F), E = inst_end(&F); I != E;) {
    Instruction *Inst = &*I++;

    LLVM_DEBUG(dbgs() << "Visiting: " << *Inst << "\n");

    if (auto *CI = dyn_cast<CallInst>(Inst))
      if (objcarc::hasAttachedCallOpBundle(CI)) {
        BundledInsts->insertRVCallWithColors(I->getIterator(), CI, BlockColors);
        --I;
        Changed = true;
      }

    // First try to peephole Inst. If there is nothing further we can do in
    // terms of undoing objc-arc-expand, process the next inst.
    if (tryToPeepholeInstruction(F, Inst, I, TailOkForStoreStrongs,
                                 BlockColors))
      continue;

    // Otherwise, try to undo objc-arc-expand.

    // Don't use GetArgRCIdentityRoot because we don't want to look through
    // bitcasts and such; to do the replacement, the argument must have type
    // i8*.

    // Function for replacing uses of Arg dominated by Inst.
    auto ReplaceArgUses = [Inst, this](Value *Arg) {
      // If we're compiling bugpointed code, don't get in trouble.
      if (!isa<Instruction>(Arg) && !isa<Argument>(Arg))
        return;

      // Look through the uses of the pointer.
      for (Value::use_iterator UI = Arg->use_begin(), UE = Arg->use_end();
           UI != UE;) {
        // Increment UI now, because we may unlink its element.
        Use &U = *UI++;
        unsigned OperandNo = U.getOperandNo();

        // If the call's return value dominates a use of the call's argument
        // value, rewrite the use to use the return value. We check for
        // reachability here because an unreachable call is considered to
        // trivially dominate itself, which would lead us to rewriting its
        // argument in terms of its return value, which would lead to
        // infinite loops in GetArgRCIdentityRoot.
        if (!DT->isReachableFromEntry(U) || !DT->dominates(Inst, U))
          continue;

        Changed = true;
        Instruction *Replacement = Inst;
        Type *UseTy = U.get()->getType();
        if (PHINode *PHI = dyn_cast<PHINode>(U.getUser())) {
          // For PHI nodes, insert the bitcast in the predecessor block.
          unsigned ValNo = PHINode::getIncomingValueNumForOperand(OperandNo);
          BasicBlock *IncomingBB = PHI->getIncomingBlock(ValNo);
          if (Replacement->getType() != UseTy) {
            // A catchswitch is both a pad and a terminator, meaning a basic
            // block with a catchswitch has no insertion point. Keep going up
            // the dominator tree until we find a non-catchswitch.
            BasicBlock *InsertBB = IncomingBB;
            while (isa<CatchSwitchInst>(InsertBB->getFirstNonPHI())) {
              InsertBB = DT->getNode(InsertBB)->getIDom()->getBlock();
            }

            assert(DT->dominates(Inst, &InsertBB->back()) &&
                   "Invalid insertion point for bitcast");
            Replacement = new BitCastInst(Replacement, UseTy, "",
                                          InsertBB->back().getIterator());
          }

          // While we're here, rewrite all edges for this PHI, rather
          // than just one use at a time, to minimize the number of
          // bitcasts we emit.
          for (unsigned i = 0, e = PHI->getNumIncomingValues(); i != e; ++i)
            if (PHI->getIncomingBlock(i) == IncomingBB) {
              // Keep the UI iterator valid.
              if (UI != UE &&
                  &PHI->getOperandUse(
                      PHINode::getOperandNumForIncomingValue(i)) == &*UI)
                ++UI;
              PHI->setIncomingValue(i, Replacement);
            }
        } else {
          if (Replacement->getType() != UseTy)
            Replacement =
                new BitCastInst(Replacement, UseTy, "",
                                cast<Instruction>(U.getUser())->getIterator());
          U.set(Replacement);
        }
      }
    };

    Value *Arg = cast<CallInst>(Inst)->getArgOperand(0);
    Value *OrigArg = Arg;

    // TODO: Change this to a do-while.
    for (;;) {
      ReplaceArgUses(Arg);

      // If Arg is a no-op casted pointer, strip one level of casts and
      // iterate.
      if (const BitCastInst *BI = dyn_cast<BitCastInst>(Arg))
        Arg = BI->getOperand(0);
      else if (isa<GEPOperator>(Arg) &&
               cast<GEPOperator>(Arg)->hasAllZeroIndices())
        Arg = cast<GEPOperator>(Arg)->getPointerOperand();
      else if (isa<GlobalAlias>(Arg) &&
               !cast<GlobalAlias>(Arg)->isInterposable())
        Arg = cast<GlobalAlias>(Arg)->getAliasee();
      else {
        // If Arg is a PHI node, get PHIs that are equivalent to it and replace
        // their uses.
        if (PHINode *PN = dyn_cast<PHINode>(Arg)) {
          SmallVector<Value *, 1> PHIList;
          getEquivalentPHIs(*PN, PHIList);
          for (Value *PHI : PHIList)
            ReplaceArgUses(PHI);
        }
        break;
      }
    }

    // Replace bitcast users of Arg that are dominated by Inst.
    SmallVector<BitCastInst *, 2> BitCastUsers;

    // Add all bitcast users of the function argument first.
    for (User *U : OrigArg->users())
      if (auto *BC = dyn_cast<BitCastInst>(U))
        BitCastUsers.push_back(BC);

    // Replace the bitcasts with the call return. Iterate until list is empty.
    while (!BitCastUsers.empty()) {
      auto *BC = BitCastUsers.pop_back_val();
      for (User *U : BC->users())
        if (auto *B = dyn_cast<BitCastInst>(U))
          BitCastUsers.push_back(B);

      ReplaceArgUses(BC);
    }
  }

  // If this function has no escaping allocas or suspicious vararg usage,
  // objc_storeStrong calls can be marked with the "tail" keyword.
  if (TailOkForStoreStrongs)
    for (CallInst *CI : StoreStrongCalls)
      CI->setTailCall();
  StoreStrongCalls.clear();

  return Changed;
}

//===----------------------------------------------------------------------===//
//                             Misc Pass Manager
//===----------------------------------------------------------------------===//

char ObjCARCContractLegacyPass::ID = 0;
INITIALIZE_PASS_BEGIN(ObjCARCContractLegacyPass, "objc-arc-contract",
                      "ObjC ARC contraction", false, false)
INITIALIZE_PASS_DEPENDENCY(AAResultsWrapperPass)
INITIALIZE_PASS_DEPENDENCY(DominatorTreeWrapperPass)
INITIALIZE_PASS_END(ObjCARCContractLegacyPass, "objc-arc-contract",
                    "ObjC ARC contraction", false, false)

void ObjCARCContractLegacyPass::getAnalysisUsage(AnalysisUsage &AU) const {
  AU.addRequired<AAResultsWrapperPass>();
  AU.addRequired<DominatorTreeWrapperPass>();
}

Pass *llvm::createObjCARCContractPass() {
  return new ObjCARCContractLegacyPass();
}

bool ObjCARCContractLegacyPass::runOnFunction(Function &F) {
  ObjCARCContract OCARCC;
  OCARCC.init(*F.getParent());
  auto *AA = &getAnalysis<AAResultsWrapperPass>().getAAResults();
  auto *DT = &getAnalysis<DominatorTreeWrapperPass>().getDomTree();
  return OCARCC.run(F, AA, DT);
}

PreservedAnalyses ObjCARCContractPass::run(Function &F,
                                           FunctionAnalysisManager &AM) {
  ObjCARCContract OCAC;
  OCAC.init(*F.getParent());

  bool Changed = OCAC.run(F, &AM.getResult<AAManager>(F),
                          &AM.getResult<DominatorTreeAnalysis>(F));
  bool CFGChanged = OCAC.hasCFGChanged();
  if (Changed) {
    PreservedAnalyses PA;
    if (!CFGChanged)
      PA.preserveSet<CFGAnalyses>();
    return PA;
  }
  return PreservedAnalyses::all();
}