//===- ObjCARCContract.cpp - ObjC ARC Optimization ------------------------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
/// \file
/// This file defines late ObjC ARC optimizations. ARC stands for Automatic
/// Reference Counting and is a system for managing reference counts for
/// objects in Objective-C.
///
/// This specific file mainly deals with ``contracting'' multiple lower level
/// operations into singular higher level operations through pattern matching.
///
/// WARNING: This file knows about certain library functions. It recognizes
/// them by name, and hardwires knowledge of their semantics.
///
/// WARNING: This file knows about how certain Objective-C library functions
/// are used. Naive LLVM IR transformations which would otherwise be
/// behavior-preserving may break these assumptions.
///
//===----------------------------------------------------------------------===//

// TODO: ObjCARCContract could insert PHI nodes when uses aren't
// dominated by single calls.

#include "ARCRuntimeEntryPoints.h"
#include "DependencyAnalysis.h"
#include "ObjCARC.h"
#include "ProvenanceAnalysis.h"
#include "llvm/ADT/Statistic.h"
#include "llvm/IR/Dominators.h"
#include "llvm/IR/InlineAsm.h"
#include "llvm/IR/Operator.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/raw_ostream.h"
using namespace llvm;
using namespace llvm::objcarc;

#define DEBUG_TYPE "objc-arc-contract"

STATISTIC(NumPeeps, "Number of calls peephole-optimized");
STATISTIC(NumStoreStrongs, "Number of objc_storeStrong calls formed");

//===----------------------------------------------------------------------===//
// Declarations
//===----------------------------------------------------------------------===//

namespace {
  /// \brief Late ARC optimizations
  ///
  /// These change the IR in a way that makes it difficult to be analyzed by
  /// ObjCARCOpt, so it's run late.
  class ObjCARCContract : public FunctionPass {
    bool Changed;
    AliasAnalysis *AA;
    DominatorTree *DT;
    ProvenanceAnalysis PA;
    ARCRuntimeEntryPoints EP;

    /// A flag indicating whether this optimization pass should run.
    bool Run;

    /// The inline asm string to insert between calls and RetainRV calls to
    /// make the optimization work on targets which need it.
    const MDString *RVInstMarker;

    /// The set of inserted objc_storeStrong calls. If at the end of walking
    /// the function we have found no alloca instructions, these calls can be
    /// marked "tail".
    SmallPtrSet<CallInst *, 8> StoreStrongCalls;

    /// Returns true if this peephole fully handled \p Inst; returns false if
    /// the caller should still try to undo objc-arc-expand on it.
    bool tryToPeepholeInstruction(Function &F, Instruction *Inst,
                                  inst_iterator &Iter,
                                  SmallPtrSetImpl<Instruction *> &DepInsts,
                                  SmallPtrSetImpl<const BasicBlock *> &Visited,
                                  bool &TailOkForStoreStrongs);

    bool optimizeRetainCall(Function &F, Instruction *Retain);

    bool
    contractAutorelease(Function &F, Instruction *Autorelease,
                        ARCInstKind Class,
                        SmallPtrSetImpl<Instruction *> &DependingInstructions,
                        SmallPtrSetImpl<const BasicBlock *> &Visited);

    void tryToContractReleaseIntoStoreStrong(Instruction *Release,
                                             inst_iterator &Iter);

    void getAnalysisUsage(AnalysisUsage &AU) const override;
    bool doInitialization(Module &M) override;
    bool runOnFunction(Function &F) override;

  public:
    static char ID;
    ObjCARCContract() : FunctionPass(ID) {
      initializeObjCARCContractPass(*PassRegistry::getPassRegistry());
    }
  };
} // end anonymous namespace

//===----------------------------------------------------------------------===//
// Implementation
//===----------------------------------------------------------------------===//

/// Turn objc_retain into objc_retainAutoreleasedReturnValue if the operand is
/// a return value. We do this late so we do not disrupt the dataflow analysis
/// in ObjCARCOpt.
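///
/// As an illustrative sketch (value names here are hypothetical, for
/// exposition only), a retain directly following a call:
///
///   %call = tail call i8* @foo()
///   %r = call i8* @objc_retain(i8* %call)
///
/// is rewritten in place to:
///
///   %call = tail call i8* @foo()
///   %r = call i8* @objc_retainAutoreleasedReturnValue(i8* %call)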
bool ObjCARCContract::optimizeRetainCall(Function &F, Instruction *Retain) {
  ImmutableCallSite CS(GetArgRCIdentityRoot(Retain));
  const Instruction *Call = CS.getInstruction();
  if (!Call)
    return false;
  if (Call->getParent() != Retain->getParent())
    return false;

  // Check that the call is next to the retain.
  BasicBlock::const_iterator I = ++Call->getIterator();
  while (IsNoopInstruction(&*I))
    ++I;
  if (&*I != Retain)
    return false;

  // Turn it to an objc_retainAutoreleasedReturnValue.
  Changed = true;
  ++NumPeeps;

  DEBUG(dbgs() << "Transforming objc_retain => "
                  "objc_retainAutoreleasedReturnValue since the operand is a "
                  "return value.\nOld: " << *Retain << "\n");

  // We do not have to worry about tail calls/does not throw since
  // retain/retainRV have the same properties.
  Constant *Decl = EP.get(ARCRuntimeEntryPointKind::RetainRV);
  cast<CallInst>(Retain)->setCalledFunction(Decl);

  DEBUG(dbgs() << "New: " << *Retain << "\n");
  return true;
}

/// Merge an autorelease with a retain into a fused call.
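///
/// A minimal sketch of the pattern (hypothetical value names): a retain and a
/// later autorelease of the same RC identity root,
///
///   %r = call i8* @objc_retain(i8* %x)
///   ...                                  ; nothing here may change the count
///   %a = call i8* @objc_autorelease(i8* %x)
///
/// is fused by rewriting the retain's callee to objc_retainAutorelease (or
/// objc_retainAutoreleaseRV for the AutoreleaseRV flavor) and erasing the
/// autorelease.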
bool ObjCARCContract::contractAutorelease(
    Function &F, Instruction *Autorelease, ARCInstKind Class,
    SmallPtrSetImpl<Instruction *> &DependingInstructions,
    SmallPtrSetImpl<const BasicBlock *> &Visited) {
  const Value *Arg = GetArgRCIdentityRoot(Autorelease);

  // Check that there are no instructions between the retain and the
  // autorelease (such as an autorelease_pop) which may change the count.
  CallInst *Retain = nullptr;
  if (Class == ARCInstKind::AutoreleaseRV)
    FindDependencies(RetainAutoreleaseRVDep, Arg,
                     Autorelease->getParent(), Autorelease,
                     DependingInstructions, Visited, PA);
  else
    FindDependencies(RetainAutoreleaseDep, Arg,
                     Autorelease->getParent(), Autorelease,
                     DependingInstructions, Visited, PA);

  Visited.clear();
  if (DependingInstructions.size() != 1) {
    DependingInstructions.clear();
    return false;
  }

  Retain = dyn_cast_or_null<CallInst>(*DependingInstructions.begin());
  DependingInstructions.clear();

  if (!Retain || GetBasicARCInstKind(Retain) != ARCInstKind::Retain ||
      GetArgRCIdentityRoot(Retain) != Arg)
    return false;

  Changed = true;
  ++NumPeeps;

  DEBUG(dbgs() << "    Fusing retain/autorelease!\n"
                  "        Autorelease:" << *Autorelease << "\n"
                  "        Retain: " << *Retain << "\n");

  Constant *Decl = EP.get(Class == ARCInstKind::AutoreleaseRV
                              ? ARCRuntimeEntryPointKind::RetainAutoreleaseRV
                              : ARCRuntimeEntryPointKind::RetainAutorelease);
  Retain->setCalledFunction(Decl);

  DEBUG(dbgs() << "        New RetainAutorelease: " << *Retain << "\n");

  EraseInstruction(Autorelease);
  return true;
}

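/// Scan forward from \p Load through the remainder of its basic block,
/// looking for both \p Release and a simple store back to the pointer that
/// \p Load loads from; the two may appear in either order. Returns the store
/// if the scan proves the pattern is safe to contract (while a store is
/// pending, no intervening instruction may use the loaded value, and nothing
/// other than that one store may write to the load's location); otherwise
/// returns nullptr.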
static StoreInst *findSafeStoreForStoreStrongContraction(LoadInst *Load,
                                                         Instruction *Release,
                                                         ProvenanceAnalysis &PA,
                                                         AliasAnalysis *AA) {
  StoreInst *Store = nullptr;
  bool SawRelease = false;

  // Get the location associated with Load.
  MemoryLocation Loc = MemoryLocation::get(Load);
  auto *LocPtr = Loc.Ptr->stripPointerCasts();

  // Walk down to find the store and the release, which may be in either order.
  for (auto I = std::next(BasicBlock::iterator(Load)),
            E = Load->getParent()->end();
       I != E; ++I) {
    // If we found the store we were looking for and saw the release,
    // break. There is no more work to be done.
    if (Store && SawRelease)
      break;

    // Now we know that we have not seen either the store or the release. If I
    // is the release, mark that we saw the release and continue.
    Instruction *Inst = &*I;
    if (Inst == Release) {
      SawRelease = true;
      continue;
    }

    // Otherwise, we check if Inst is a "good" store. Grab the instruction
    // class of Inst.
    ARCInstKind Class = GetBasicARCInstKind(Inst);

    // If Inst is an unrelated retain, we don't care about it.
    //
    // TODO: This is one area where the optimization could be made more
    // aggressive.
    if (IsRetain(Class))
      continue;

    // If we have seen the store, but not the release...
    if (Store) {
      // We need to make sure that it is safe to move the release from its
      // current position to the store. This implies proving that any
      // instruction in between Store and the Release conservatively can not
      // use the RCIdentityRoot of Release. If we can prove that Inst cannot
      // use it, continue...
      if (!CanUse(Inst, Load, PA, Class)) {
        continue;
      }

      // Otherwise, be conservative and return nullptr.
      return nullptr;
    }

    // Ok, now we know we have not seen a store yet. See if Inst can write to
    // our load location; if it can not, just ignore the instruction.
    if (!(AA->getModRefInfo(Inst, Loc) & MRI_Mod))
      continue;

    Store = dyn_cast<StoreInst>(Inst);

    // If Inst can write to the location, check that it is a simple store. If
    // it is not a store, or is a store that is not simple, then we have
    // something we do not understand writing to this memory, implying we can
    // not move the load over the write to any subsequent store that we may
    // find.
    if (!Store || !Store->isSimple())
      return nullptr;

    // Then make sure that the pointer we are storing to is Ptr. If so, we
    // found our Store!
    if (Store->getPointerOperand()->stripPointerCasts() == LocPtr)
      continue;

    // Otherwise, we have an unknown store to some other ptr that clobbers
    // Loc.Ptr. Bail!
    return nullptr;
  }

  // If we did not find the store or did not see the release, fail.
  if (!Store || !SawRelease)
    return nullptr;

  // We succeeded!
  return Store;
}

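/// Walk up from \p Store looking for an objc_retain whose argument's
/// RCIdentityRoot is \p New. Returns nullptr if any instruction between the
/// store and that retain, other than \p Release itself, could decrement a
/// reference count.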
static Instruction *
findRetainForStoreStrongContraction(Value *New, StoreInst *Store,
                                    Instruction *Release,
                                    ProvenanceAnalysis &PA) {
  // Walk up from the Store to find the retain.
  BasicBlock::iterator I = Store->getIterator();
  BasicBlock::iterator Begin = Store->getParent()->begin();
  while (I != Begin && GetBasicARCInstKind(&*I) != ARCInstKind::Retain) {
    Instruction *Inst = &*I;

    // It is only safe to move the retain to the store if we can prove
    // conservatively that nothing besides the release can decrement reference
    // counts in between the retain and the store.
    if (CanDecrementRefCount(Inst, New, PA) && Inst != Release)
      return nullptr;
    --I;
  }
  Instruction *Retain = &*I;
  if (GetBasicARCInstKind(Retain) != ARCInstKind::Retain)
    return nullptr;
  if (GetArgRCIdentityRoot(Retain) != New)
    return nullptr;
  return Retain;
}

/// Attempt to merge an objc_release with a store, load, and objc_retain to
/// form an objc_storeStrong. An objc_storeStrong:
///
///   objc_storeStrong(i8** %old_ptr, i8* new_value)
///
/// is equivalent to the following IR sequence:
///
///   ; Load old value.
///   %old_value = load i8** %old_ptr               (1)
///
///   ; Increment the new value and then release the old value. This must occur
///   ; in order in case old_value releases new_value in its destructor causing
///   ; us to potentially have a dangling ptr.
///   tail call i8* @objc_retain(i8* %new_value)    (2)
///   tail call void @objc_release(i8* %old_value)  (3)
///
///   ; Store the new_value into old_ptr
///   store i8* %new_value, i8** %old_ptr           (4)
///
/// The safety of this optimization is based around the following
/// considerations:
///
///   1. We are forming the store strong at the store. Thus to perform this
///      optimization it must be safe to move the retain, load, and release to
///      (4).
///   2. We need to make sure that any re-orderings of (1), (2), (3), (4) are
///      safe.
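///
/// When the checks below succeed, instructions (1)-(4) are replaced with a
/// single runtime call (sketched with the same names as above):
///
///   call void @objc_storeStrong(i8** %old_ptr, i8* %new_value)
///
/// with bitcasts inserted as needed, and the load deleted if it has no other
/// uses.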
void ObjCARCContract::tryToContractReleaseIntoStoreStrong(Instruction *Release,
                                                          inst_iterator &Iter) {
  // See if we are releasing something that we just loaded.
  auto *Load = dyn_cast<LoadInst>(GetArgRCIdentityRoot(Release));
  if (!Load || !Load->isSimple())
    return;

  // For now, require everything to be in one basic block.
  BasicBlock *BB = Release->getParent();
  if (Load->getParent() != BB)
    return;

  // First scan down the BB from Load, looking for a store of the
  // RCIdentityRoot of Load's pointer operand.
  StoreInst *Store =
      findSafeStoreForStoreStrongContraction(Load, Release, PA, AA);
  // If we fail, bail.
  if (!Store)
    return;

  // Then find what new_value's RCIdentity Root is.
  Value *New = GetRCIdentityRoot(Store->getValueOperand());

  // Then walk up the BB and look for a retain on New without any intervening
  // instructions which conservatively might decrement ref counts.
  Instruction *Retain =
      findRetainForStoreStrongContraction(New, Store, Release, PA);

  // If we fail, bail.
  if (!Retain)
    return;

  Changed = true;
  ++NumStoreStrongs;

  DEBUG(
      llvm::dbgs() << "    Contracting retain, release into objc_storeStrong.\n"
                   << "        Old:\n"
                   << "            Store:   " << *Store << "\n"
                   << "            Release: " << *Release << "\n"
                   << "            Retain:  " << *Retain << "\n"
                   << "            Load:    " << *Load << "\n");

  LLVMContext &C = Release->getContext();
  Type *I8X = PointerType::getUnqual(Type::getInt8Ty(C));
  Type *I8XX = PointerType::getUnqual(I8X);

  Value *Args[] = { Load->getPointerOperand(), New };
  if (Args[0]->getType() != I8XX)
    Args[0] = new BitCastInst(Args[0], I8XX, "", Store);
  if (Args[1]->getType() != I8X)
    Args[1] = new BitCastInst(Args[1], I8X, "", Store);
  Constant *Decl = EP.get(ARCRuntimeEntryPointKind::StoreStrong);
  CallInst *StoreStrong = CallInst::Create(Decl, Args, "", Store);
  StoreStrong->setDoesNotThrow();
  StoreStrong->setDebugLoc(Store->getDebugLoc());

  // We can't set the tail flag yet, because we haven't yet determined
  // whether there are any escaping allocas. Remember this call, so that
  // we can set the tail flag once we know it's safe.
  StoreStrongCalls.insert(StoreStrong);

  DEBUG(llvm::dbgs() << "        New Store Strong: " << *StoreStrong << "\n");

  if (&*Iter == Retain) ++Iter;
  if (&*Iter == Store) ++Iter;
  Store->eraseFromParent();
  Release->eraseFromParent();
  EraseInstruction(Retain);
  if (Load->use_empty())
    Load->eraseFromParent();
}

bool ObjCARCContract::tryToPeepholeInstruction(
    Function &F, Instruction *Inst, inst_iterator &Iter,
    SmallPtrSetImpl<Instruction *> &DependingInsts,
    SmallPtrSetImpl<const BasicBlock *> &Visited,
    bool &TailOkForStoreStrongs) {
  // Only these library routines return their argument. In particular,
  // objc_retainBlock does not necessarily return its argument.
  ARCInstKind Class = GetBasicARCInstKind(Inst);
  switch (Class) {
  case ARCInstKind::FusedRetainAutorelease:
  case ARCInstKind::FusedRetainAutoreleaseRV:
    return false;
  case ARCInstKind::Autorelease:
  case ARCInstKind::AutoreleaseRV:
    return contractAutorelease(F, Inst, Class, DependingInsts, Visited);
  case ARCInstKind::Retain:
    // Attempt to convert retains to retainrvs if they are next to function
    // calls.
    if (!optimizeRetainCall(F, Inst))
      return false;
    // If we succeed in our optimization, fall through.
    LLVM_FALLTHROUGH;
  case ARCInstKind::RetainRV:
  case ARCInstKind::ClaimRV: {
    // If we're compiling for a target which needs a special inline-asm
    // marker to do the return value optimization, insert it now.
    if (!RVInstMarker)
      return false;
    BasicBlock::iterator BBI = Inst->getIterator();
    BasicBlock *InstParent = Inst->getParent();

    // Step up to see if the call immediately precedes the RV call.
    // If it's an invoke, we have to cross a block boundary. And we have
    // to carefully dodge no-op instructions.
    do {
      if (BBI == InstParent->begin()) {
        BasicBlock *Pred = InstParent->getSinglePredecessor();
        if (!Pred)
          goto decline_rv_optimization;
        BBI = Pred->getTerminator()->getIterator();
        break;
      }
      --BBI;
    } while (IsNoopInstruction(&*BBI));

    if (&*BBI == GetArgRCIdentityRoot(Inst)) {
      DEBUG(dbgs() << "Adding inline asm marker for the return value "
                      "optimization.\n");
      Changed = true;
      InlineAsm *IA = InlineAsm::get(
          FunctionType::get(Type::getVoidTy(Inst->getContext()),
                            /*isVarArg=*/false),
          RVInstMarker->getString(),
          /*Constraints=*/"", /*hasSideEffects=*/true);
      CallInst::Create(IA, "", Inst);
    }
  decline_rv_optimization:
    return false;
  }
  case ARCInstKind::InitWeak: {
    // objc_initWeak(p, null) => *p = null
    CallInst *CI = cast<CallInst>(Inst);
    if (IsNullOrUndef(CI->getArgOperand(1))) {
      Value *Null =
          ConstantPointerNull::get(cast<PointerType>(CI->getType()));
      Changed = true;
      new StoreInst(Null, CI->getArgOperand(0), CI);

      DEBUG(dbgs() << "OBJCARCContract: Old = " << *CI << "\n"
                   << "                 New = " << *Null << "\n");

      CI->replaceAllUsesWith(Null);
      CI->eraseFromParent();
    }
    return true;
  }
  case ARCInstKind::Release:
    // Try to form an objc store strong from our release. If we fail, there is
    // nothing further to do below, so continue.
    tryToContractReleaseIntoStoreStrong(Inst, Iter);
    return true;
  case ARCInstKind::User:
    // Be conservative if the function has any alloca instructions.
    // Technically we only care about escaping alloca instructions,
    // but this is sufficient to handle some interesting cases.
    if (isa<AllocaInst>(Inst))
      TailOkForStoreStrongs = false;
    return true;
  case ARCInstKind::IntrinsicUser:
    // Remove calls to @clang.arc.use(...).
    Inst->eraseFromParent();
    return true;
  default:
    return true;
  }
}

//===----------------------------------------------------------------------===//
// Top Level Driver
//===----------------------------------------------------------------------===//

bool ObjCARCContract::runOnFunction(Function &F) {
  if (!EnableARCOpts)
    return false;

  // If nothing in the Module uses ARC, don't do anything.
  if (!Run)
    return false;

  Changed = false;
  AA = &getAnalysis<AAResultsWrapperPass>().getAAResults();
  DT = &getAnalysis<DominatorTreeWrapperPass>().getDomTree();

  PA.setAA(&getAnalysis<AAResultsWrapperPass>().getAAResults());

  DEBUG(llvm::dbgs() << "**** ObjCARC Contract ****\n");

  // Track whether it's ok to mark objc_storeStrong calls with the "tail"
  // keyword. Be conservative if the function has variadic arguments.
  // It seems that functions which "return twice" are also unsafe for the
  // "tail" argument, because they are setjmp, which could need to
  // return to an earlier stack state.
  bool TailOkForStoreStrongs =
      !F.isVarArg() && !F.callsFunctionThatReturnsTwice();

  // For ObjC library calls which return their argument, replace uses of the
  // argument with uses of the call return value, if it dominates the use. This
  // reduces register pressure.
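  //
  // For example (hypothetical IR, for exposition only), when the call
  // dominates a use of its argument:
  //
  //   %r = call i8* @objc_retain(i8* %p)
  //   call void @use(i8* %p)
  //
  // the use of %p is rewritten to use the return value instead:
  //
  //   %r = call i8* @objc_retain(i8* %p)
  //   call void @use(i8* %r)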
  SmallPtrSet<Instruction *, 4> DependingInstructions;
  SmallPtrSet<const BasicBlock *, 4> Visited;
  for (inst_iterator I = inst_begin(&F), E = inst_end(&F); I != E;) {
    Instruction *Inst = &*I++;

    DEBUG(dbgs() << "Visiting: " << *Inst << "\n");

    // First try to peephole Inst. If there is nothing further we can do in
    // terms of undoing objc-arc-expand, process the next inst.
    if (tryToPeepholeInstruction(F, Inst, I, DependingInstructions, Visited,
                                 TailOkForStoreStrongs))
      continue;

    // Otherwise, try to undo objc-arc-expand.

    // Don't use GetArgRCIdentityRoot because we don't want to look through
    // bitcasts and such; to do the replacement, the argument must have
    // type i8*.

    // Function for replacing uses of Arg dominated by Inst.
    auto ReplaceArgUses = [Inst, this](Value *Arg) {
      // If we're compiling bugpointed code, don't get in trouble.
      if (!isa<Instruction>(Arg) && !isa<Argument>(Arg))
        return;

      // Look through the uses of the pointer.
      for (Value::use_iterator UI = Arg->use_begin(), UE = Arg->use_end();
           UI != UE; ) {
        // Increment UI now, because we may unlink its element.
        Use &U = *UI++;
        unsigned OperandNo = U.getOperandNo();

        // If the call's return value dominates a use of the call's argument
        // value, rewrite the use to use the return value. We check for
        // reachability here because an unreachable call is considered to
        // trivially dominate itself, which would lead us to rewriting its
        // argument in terms of its return value, which would lead to
        // infinite loops in GetArgRCIdentityRoot.
        if (DT->isReachableFromEntry(U) && DT->dominates(Inst, U)) {
          Changed = true;
          Instruction *Replacement = Inst;
          Type *UseTy = U.get()->getType();
          if (PHINode *PHI = dyn_cast<PHINode>(U.getUser())) {
            // For PHI nodes, insert the bitcast in the predecessor block.
            unsigned ValNo = PHINode::getIncomingValueNumForOperand(OperandNo);
            BasicBlock *BB = PHI->getIncomingBlock(ValNo);
            if (Replacement->getType() != UseTy)
              Replacement = new BitCastInst(Replacement, UseTy, "",
                                            &BB->back());
            // While we're here, rewrite all edges for this PHI, rather
            // than just one use at a time, to minimize the number of
            // bitcasts we emit.
            for (unsigned i = 0, e = PHI->getNumIncomingValues(); i != e; ++i)
              if (PHI->getIncomingBlock(i) == BB) {
                // Keep the UI iterator valid.
                if (UI != UE &&
                    &PHI->getOperandUse(
                        PHINode::getOperandNumForIncomingValue(i)) == &*UI)
                  ++UI;
                PHI->setIncomingValue(i, Replacement);
              }
          } else {
            if (Replacement->getType() != UseTy)
              Replacement = new BitCastInst(Replacement, UseTy, "",
                                            cast<Instruction>(U.getUser()));
            U.set(Replacement);
          }
        }
      }
    };

    Value *Arg = cast<CallInst>(Inst)->getArgOperand(0);
    Value *OrigArg = Arg;

    // TODO: Change this to a do-while.
    for (;;) {
      ReplaceArgUses(Arg);

      // If Arg is a no-op casted pointer, strip one level of casts and
      // iterate.
      if (const BitCastInst *BI = dyn_cast<BitCastInst>(Arg))
        Arg = BI->getOperand(0);
      else if (isa<GEPOperator>(Arg) &&
               cast<GEPOperator>(Arg)->hasAllZeroIndices())
        Arg = cast<GEPOperator>(Arg)->getPointerOperand();
      else if (isa<GlobalAlias>(Arg) &&
               !cast<GlobalAlias>(Arg)->isInterposable())
        Arg = cast<GlobalAlias>(Arg)->getAliasee();
      else
        break;
    }

    // Replace bitcast users of Arg that are dominated by Inst.
    SmallVector<BitCastInst *, 2> BitCastUsers;

    // Add all bitcast users of the function argument first.
    for (User *U : OrigArg->users())
      if (auto *BC = dyn_cast<BitCastInst>(U))
        BitCastUsers.push_back(BC);

    // Replace the bitcasts with the call return. Iterate until the list is
    // empty.
    while (!BitCastUsers.empty()) {
      auto *BC = BitCastUsers.pop_back_val();
      for (User *U : BC->users())
        if (auto *B = dyn_cast<BitCastInst>(U))
          BitCastUsers.push_back(B);

      ReplaceArgUses(BC);
    }
  }

  // If this function has no escaping allocas or suspicious vararg usage,
  // objc_storeStrong calls can be marked with the "tail" keyword.
  if (TailOkForStoreStrongs)
    for (CallInst *CI : StoreStrongCalls)
      CI->setTailCall();
  StoreStrongCalls.clear();

  return Changed;
}

//===----------------------------------------------------------------------===//
// Misc Pass Manager
//===----------------------------------------------------------------------===//

char ObjCARCContract::ID = 0;
INITIALIZE_PASS_BEGIN(ObjCARCContract, "objc-arc-contract",
                      "ObjC ARC contraction", false, false)
INITIALIZE_PASS_DEPENDENCY(AAResultsWrapperPass)
INITIALIZE_PASS_DEPENDENCY(DominatorTreeWrapperPass)
INITIALIZE_PASS_END(ObjCARCContract, "objc-arc-contract",
                    "ObjC ARC contraction", false, false)

void ObjCARCContract::getAnalysisUsage(AnalysisUsage &AU) const {
  AU.addRequired<AAResultsWrapperPass>();
  AU.addRequired<DominatorTreeWrapperPass>();
  AU.setPreservesCFG();
}

Pass *llvm::createObjCARCContractPass() { return new ObjCARCContract(); }

bool ObjCARCContract::doInitialization(Module &M) {
  // If nothing in the Module uses ARC, don't do anything.
  Run = ModuleHasARC(M);
  if (!Run)
    return false;

  EP.init(&M);

  // Initialize RVInstMarker.
  RVInstMarker = nullptr;
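  // When present, the marker is module-level metadata of the following form
  // (the marker string shown is illustrative; the actual string is supplied
  // by the frontend and is target-specific):
  //
  //   !clang.arc.retainAutoreleasedReturnValueMarker = !{!0}
  //   !0 = !{!"mov\09fp, fp\09\09; marker for objc_retainAutoreleaseReturnValue"}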
  if (NamedMDNode *NMD =
          M.getNamedMetadata("clang.arc.retainAutoreleasedReturnValueMarker"))
    if (NMD->getNumOperands() == 1) {
      const MDNode *N = NMD->getOperand(0);
      if (N->getNumOperands() == 1)
        if (const MDString *S = dyn_cast<MDString>(N->getOperand(0)))
          RVInstMarker = S;
    }

  return false;
}