Coverage Report

Created: 2019-07-24 05:18

/Users/buildslave/jenkins/workspace/clang-stage2-coverage-R/llvm/lib/Transforms/ObjCARC/ObjCARCContract.cpp
//===- ObjCARCContract.cpp - ObjC ARC Optimization ------------------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
/// \file
/// This file defines late ObjC ARC optimizations. ARC stands for Automatic
/// Reference Counting and is a system for managing reference counts for objects
/// in Objective-C.
///
/// This specific file mainly deals with "contracting" multiple lower level
/// operations into singular higher level operations through pattern matching.
///
/// WARNING: This file knows about certain library functions. It recognizes them
/// by name, and hardwires knowledge of their semantics.
///
/// WARNING: This file knows about how certain Objective-C library functions are
/// used. Naive LLVM IR transformations which would otherwise be
/// behavior-preserving may break these assumptions.
///
//===----------------------------------------------------------------------===//

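// As a concrete illustration of such a contraction (a hypothetical IR sketch,
// not taken from this file), a retain/autorelease pair on the same pointer,
// with nothing in between that can touch the reference count, can be fused
// into one runtime call:
//
//   %0 = tail call i8* @objc_retain(i8* %p)
//   tail call i8* @objc_autorelease(i8* %p)
//     =>
//   %0 = tail call i8* @objc_retainAutorelease(i8* %p)
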
// TODO: ObjCARCContract could insert PHI nodes when uses aren't
// dominated by single calls.

#include "ARCRuntimeEntryPoints.h"
#include "DependencyAnalysis.h"
#include "ObjCARC.h"
#include "ProvenanceAnalysis.h"
#include "llvm/ADT/Statistic.h"
#include "llvm/Analysis/EHPersonalities.h"
#include "llvm/IR/Dominators.h"
#include "llvm/IR/InlineAsm.h"
#include "llvm/IR/Operator.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/raw_ostream.h"

using namespace llvm;
using namespace llvm::objcarc;

#define DEBUG_TYPE "objc-arc-contract"

STATISTIC(NumPeeps, "Number of calls peephole-optimized");
STATISTIC(NumStoreStrongs, "Number of objc_storeStrong calls formed");

static cl::opt<unsigned> MaxBBSize("arc-contract-max-bb-size", cl::Hidden,
    cl::desc("Maximum basic block size to discover the dominance relation of "
             "two instructions in the same basic block"), cl::init(65535));

//===----------------------------------------------------------------------===//
//                                Declarations
//===----------------------------------------------------------------------===//

namespace {
  /// Late ARC optimizations
  ///
  /// These change the IR in a way that makes it difficult for ObjCARCOpt to
  /// analyze, so this pass is run late.
  class ObjCARCContract : public FunctionPass {
    bool Changed;
    AliasAnalysis *AA;
    DominatorTree *DT;
    ProvenanceAnalysis PA;
    ARCRuntimeEntryPoints EP;

    /// A flag indicating whether this optimization pass should run.
    bool Run;

    /// The inline asm string to insert between calls and RetainRV calls to make
    /// the optimization work on targets which need it.
    const MDString *RVInstMarker;

    /// The set of inserted objc_storeStrong calls. If at the end of walking the
    /// function we have found no alloca instructions, these calls can be marked
    /// "tail".
    SmallPtrSet<CallInst *, 8> StoreStrongCalls;

    /// Returns true if we eliminated Inst.
    bool tryToPeepholeInstruction(
        Function &F, Instruction *Inst, inst_iterator &Iter,
        SmallPtrSetImpl<Instruction *> &DepInsts,
        SmallPtrSetImpl<const BasicBlock *> &Visited,
        bool &TailOkForStoreStrongs,
        const DenseMap<BasicBlock *, ColorVector> &BlockColors);

    bool optimizeRetainCall(Function &F, Instruction *Retain);

    bool
    contractAutorelease(Function &F, Instruction *Autorelease,
                        ARCInstKind Class,
                        SmallPtrSetImpl<Instruction *> &DependingInstructions,
                        SmallPtrSetImpl<const BasicBlock *> &Visited);

    void tryToContractReleaseIntoStoreStrong(
        Instruction *Release, inst_iterator &Iter,
        const DenseMap<BasicBlock *, ColorVector> &BlockColors);

    void getAnalysisUsage(AnalysisUsage &AU) const override;
    bool doInitialization(Module &M) override;
    bool runOnFunction(Function &F) override;

  public:
    static char ID;
    ObjCARCContract() : FunctionPass(ID) {
      initializeObjCARCContractPass(*PassRegistry::getPassRegistry());
    }
  };
}

//===----------------------------------------------------------------------===//
//                               Implementation
//===----------------------------------------------------------------------===//

/// Turn objc_retain into objc_retainAutoreleasedReturnValue if the operand is a
/// return value. We do this late so we do not disrupt the dataflow analysis in
/// ObjCARCOpt.
bool ObjCARCContract::optimizeRetainCall(Function &F, Instruction *Retain) {
  ImmutableCallSite CS(GetArgRCIdentityRoot(Retain));
  const Instruction *Call = CS.getInstruction();
  if (!Call)
    return false;
  if (Call->getParent() != Retain->getParent())
    return false;

  // Check that the call is next to the retain.
  BasicBlock::const_iterator I = ++Call->getIterator();
  while (IsNoopInstruction(&*I))
    ++I;
  if (&*I != Retain)
    return false;

  // Turn it into an objc_retainAutoreleasedReturnValue.
  Changed = true;
  ++NumPeeps;

  LLVM_DEBUG(
      dbgs() << "Transforming objc_retain => "
                "objc_retainAutoreleasedReturnValue since the operand is a "
                "return value.\nOld: "
             << *Retain << "\n");

  // We do not have to worry about tail calls or the nounwind attribute, since
  // retain/retainRV have the same properties.
  Function *Decl = EP.get(ARCRuntimeEntryPointKind::RetainRV);
  cast<CallInst>(Retain)->setCalledFunction(Decl);

  LLVM_DEBUG(dbgs() << "New: " << *Retain << "\n");
  return true;
}

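// A sketch of the peephole above (hypothetical IR): when the retain
// immediately follows the call whose result it retains, the plain retain is
// rewritten to the return-value-optimized entry point:
//
//   %call = tail call i8* @foo()
//   %0 = tail call i8* @objc_retain(i8* %call)
//     =>
//   %call = tail call i8* @foo()
//   %0 = tail call i8* @objc_retainAutoreleasedReturnValue(i8* %call)
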
/// Merge an autorelease with a retain into a fused call.
bool ObjCARCContract::contractAutorelease(
    Function &F, Instruction *Autorelease, ARCInstKind Class,
    SmallPtrSetImpl<Instruction *> &DependingInstructions,
    SmallPtrSetImpl<const BasicBlock *> &Visited) {
  const Value *Arg = GetArgRCIdentityRoot(Autorelease);

  // Check that there are no instructions between the retain and the autorelease
  // (such as an autorelease_pop) which may change the count.
  CallInst *Retain = nullptr;
  if (Class == ARCInstKind::AutoreleaseRV)
    FindDependencies(RetainAutoreleaseRVDep, Arg,
                     Autorelease->getParent(), Autorelease,
                     DependingInstructions, Visited, PA);
  else
    FindDependencies(RetainAutoreleaseDep, Arg,
                     Autorelease->getParent(), Autorelease,
                     DependingInstructions, Visited, PA);

  Visited.clear();
  if (DependingInstructions.size() != 1) {
    DependingInstructions.clear();
    return false;
  }

  Retain = dyn_cast_or_null<CallInst>(*DependingInstructions.begin());
  DependingInstructions.clear();

  if (!Retain || GetBasicARCInstKind(Retain) != ARCInstKind::Retain ||
      GetArgRCIdentityRoot(Retain) != Arg)
    return false;

  Changed = true;
  ++NumPeeps;

  LLVM_DEBUG(dbgs() << "    Fusing retain/autorelease!\n"
                       "        Autorelease: "
                    << *Autorelease
                    << "\n"
                       "        Retain: "
                    << *Retain << "\n");

  Function *Decl = EP.get(Class == ARCInstKind::AutoreleaseRV
                              ? ARCRuntimeEntryPointKind::RetainAutoreleaseRV
                              : ARCRuntimeEntryPointKind::RetainAutorelease);
  Retain->setCalledFunction(Decl);

  LLVM_DEBUG(dbgs() << "        New RetainAutorelease: " << *Retain << "\n");

  EraseInstruction(Autorelease);
  return true;
}

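// A case the dependency check above rejects (hypothetical IR): an
// autorelease-pool pop between the pair may change the reference count, so
// the retain and autorelease are left unfused:
//
//   %0 = tail call i8* @objc_retain(i8* %p)
//   call void @objc_autoreleasePoolPop(i8* %pool)
//   tail call i8* @objc_autorelease(i8* %p)
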
static StoreInst *findSafeStoreForStoreStrongContraction(LoadInst *Load,
                                                         Instruction *Release,
                                                         ProvenanceAnalysis &PA,
                                                         AliasAnalysis *AA) {
  StoreInst *Store = nullptr;
  bool SawRelease = false;

  // Get the location associated with Load.
  MemoryLocation Loc = MemoryLocation::get(Load);
  auto *LocPtr = Loc.Ptr->stripPointerCasts();

  // Walk down to find the store and the release, which may be in either order.
  for (auto I = std::next(BasicBlock::iterator(Load)),
            E = Load->getParent()->end();
       I != E; ++I) {
    // If we found the store we were looking for and saw the release,
    // break. There is no more work to be done.
    if (Store && SawRelease)
      break;

    // Now we know that we have not seen either the store or the release. If I
    // is the release, mark that we saw the release and continue.
    Instruction *Inst = &*I;
    if (Inst == Release) {
      SawRelease = true;
      continue;
    }

    // Otherwise, we check if Inst is a "good" store. Grab the instruction class
    // of Inst.
    ARCInstKind Class = GetBasicARCInstKind(Inst);

    // If Inst is an unrelated retain, we don't care about it.
    //
    // TODO: This is one area where the optimization could be made more
    // aggressive.
    if (IsRetain(Class))
      continue;

    // If we have seen the store, but not the release...
    if (Store) {
      // We need to make sure that it is safe to move the release from its
      // current position to the store. This implies proving that any
      // instruction in between Store and the Release conservatively cannot use
      // the RCIdentityRoot of Release. If we can prove that Inst cannot use
      // it, ignore Inst and continue...
      if (!CanUse(Inst, Load, PA, Class)) {
        continue;
      }

      // Otherwise, be conservative and return nullptr.
      return nullptr;
    }

    // Ok, now we know we have not seen a store yet. See if Inst can write to
    // our load location; if it cannot, just ignore the instruction.
    if (!isModSet(AA->getModRefInfo(Inst, Loc)))
      continue;

    Store = dyn_cast<StoreInst>(Inst);

    // If Inst can, then check if Inst is a simple store. If Inst is not a
    // store, or is a store that is not simple, then something we do not
    // understand is writing to this memory, implying we cannot move the load
    // over the write to any subsequent store that we may find.
    if (!Store || !Store->isSimple())
      return nullptr;

    // Then make sure that the pointer we are storing to is LocPtr. If so, we
    // found our Store!
    if (Store->getPointerOperand()->stripPointerCasts() == LocPtr)
      continue;

    // Otherwise, we have an unknown store to some other ptr that clobbers
    // Loc.Ptr. Bail!
    return nullptr;
  }

  // If we did not find the store or did not see the release, fail.
  if (!Store || !SawRelease)
    return nullptr;

  // We succeeded!
  return Store;
}

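// The shape this walk accepts, in either order (hypothetical IR sketch):
//
//   %old = load i8*, i8** %ptr
//   tail call void @objc_release(i8* %old)  ; the Release we were given
//   store i8* %new, i8** %ptr               ; a simple store back to %ptr
//
// with no intervening instruction that can use %old or clobber %ptr.
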
static Instruction *
findRetainForStoreStrongContraction(Value *New, StoreInst *Store,
                                    Instruction *Release,
                                    ProvenanceAnalysis &PA) {
  // Walk up from the Store to find the retain.
  BasicBlock::iterator I = Store->getIterator();
  BasicBlock::iterator Begin = Store->getParent()->begin();
  while (I != Begin && GetBasicARCInstKind(&*I) != ARCInstKind::Retain) {
    Instruction *Inst = &*I;

    // It is only safe to move the retain to the store if we can prove
    // conservatively that nothing besides the release can decrement reference
    // counts in between the retain and the store.
    if (CanDecrementRefCount(Inst, New, PA) && Inst != Release)
      return nullptr;
    --I;
  }
  Instruction *Retain = &*I;
  if (GetBasicARCInstKind(Retain) != ARCInstKind::Retain)
    return nullptr;
  if (GetArgRCIdentityRoot(Retain) != New)
    return nullptr;
  return Retain;
}

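// Continuing the sketch above (hypothetical IR), the upward walk from the
// store succeeds when it reaches a retain on the stored value with nothing in
// between that could decrement a reference count (other than Release itself):
//
//   %0 = tail call i8* @objc_retain(i8* %new)
//   ...                                      ; no ref-count decrements here
//   store i8* %new, i8** %ptr
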
/// Create a call instruction with the correct funclet token. Should be used
/// instead of calling CallInst::Create directly.
static CallInst *
createCallInst(FunctionType *FTy, Value *Func, ArrayRef<Value *> Args,
               const Twine &NameStr, Instruction *InsertBefore,
               const DenseMap<BasicBlock *, ColorVector> &BlockColors) {
  SmallVector<OperandBundleDef, 1> OpBundles;
  if (!BlockColors.empty()) {
    const ColorVector &CV = BlockColors.find(InsertBefore->getParent())->second;
    assert(CV.size() == 1 && "non-unique color for block!");
    Instruction *EHPad = CV.front()->getFirstNonPHI();
    if (EHPad->isEHPad())
      OpBundles.emplace_back("funclet", EHPad);
  }

  return CallInst::Create(FTy, Func, Args, OpBundles, NameStr, InsertBefore);
}

static CallInst *
createCallInst(FunctionCallee Func, ArrayRef<Value *> Args, const Twine &NameStr,
               Instruction *InsertBefore,
               const DenseMap<BasicBlock *, ColorVector> &BlockColors) {
  return createCallInst(Func.getFunctionType(), Func.getCallee(), Args, NameStr,
                        InsertBefore, BlockColors);
}

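// Usage sketch (hypothetical IR): when the insertion point is colored by an
// EH funclet, the helper attaches the corresponding "funclet" operand bundle
// so WinEH preparation keeps the call inside its funclet:
//
//   %cp = catchpad within %cs [...]
//   call void @objc_storeStrong(i8** %p, i8* %v) [ "funclet"(token %cp) ]
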
/// Attempt to merge an objc_release with a store, load, and objc_retain to form
/// an objc_storeStrong. An objc_storeStrong:
///
///   objc_storeStrong(i8** %old_ptr, i8* %new_value)
///
/// is equivalent to the following IR sequence:
///
///   ; Load old value.
///   %old_value = load i8** %old_ptr               (1)
///
///   ; Increment the new value and then release the old value. This must occur
///   ; in order in case old_value releases new_value in its destructor causing
///   ; us to potentially have a dangling ptr.
///   tail call i8* @objc_retain(i8* %new_value)    (2)
///   tail call void @objc_release(i8* %old_value)  (3)
///
///   ; Store the new_value into old_ptr
///   store i8* %new_value, i8** %old_ptr           (4)
///
/// The safety of this optimization is based around the following
/// considerations:
///
///  1. We are forming the store strong at the store. Thus to perform this
///     optimization it must be safe to move the retain, load, and release to
///     (4).
///  2. We need to make sure that any re-orderings of (1), (2), (3), (4) are
///     safe.
void ObjCARCContract::tryToContractReleaseIntoStoreStrong(
    Instruction *Release, inst_iterator &Iter,
    const DenseMap<BasicBlock *, ColorVector> &BlockColors) {
  // See if we are releasing something that we just loaded.
  auto *Load = dyn_cast<LoadInst>(GetArgRCIdentityRoot(Release));
  if (!Load || !Load->isSimple())
    return;

  // For now, require everything to be in one basic block.
  BasicBlock *BB = Release->getParent();
  if (Load->getParent() != BB)
    return;

  // First scan down the BB from Load, looking for a store to the location
  // that Load loads from.
  StoreInst *Store =
      findSafeStoreForStoreStrongContraction(Load, Release, PA, AA);
  // If we fail, bail.
  if (!Store)
    return;

  // Then find new_value's RCIdentityRoot.
  Value *New = GetRCIdentityRoot(Store->getValueOperand());

  // Then walk up the BB and look for a retain on New without any intervening
  // instructions which conservatively might decrement ref counts.
  Instruction *Retain =
      findRetainForStoreStrongContraction(New, Store, Release, PA);

  // If we fail, bail.
  if (!Retain)
    return;

  Changed = true;
  ++NumStoreStrongs;

  LLVM_DEBUG(
      llvm::dbgs() << "    Contracting retain, release into objc_storeStrong.\n"
                   << "        Old:\n"
                   << "            Store:   " << *Store << "\n"
                   << "            Release: " << *Release << "\n"
                   << "            Retain:  " << *Retain << "\n"
                   << "            Load:    " << *Load << "\n");

  LLVMContext &C = Release->getContext();
  Type *I8X = PointerType::getUnqual(Type::getInt8Ty(C));
  Type *I8XX = PointerType::getUnqual(I8X);

  Value *Args[] = { Load->getPointerOperand(), New };
  if (Args[0]->getType() != I8XX)
    Args[0] = new BitCastInst(Args[0], I8XX, "", Store);
  if (Args[1]->getType() != I8X)
    Args[1] = new BitCastInst(Args[1], I8X, "", Store);
  Function *Decl = EP.get(ARCRuntimeEntryPointKind::StoreStrong);
  CallInst *StoreStrong = createCallInst(Decl, Args, "", Store, BlockColors);
  StoreStrong->setDoesNotThrow();
  StoreStrong->setDebugLoc(Store->getDebugLoc());

  // We can't set the tail flag yet, because we haven't yet determined
  // whether there are any escaping allocas. Remember this call, so that
  // we can set the tail flag once we know it's safe.
  StoreStrongCalls.insert(StoreStrong);

  LLVM_DEBUG(llvm::dbgs() << "        New Store Strong: " << *StoreStrong
                          << "\n");

  if (&*Iter == Retain) ++Iter;
  if (&*Iter == Store) ++Iter;
  Store->eraseFromParent();
  Release->eraseFromParent();
  EraseInstruction(Retain);
  if (Load->use_empty())
    Load->eraseFromParent();
}

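// After a successful contraction (continuing the doc example above), the
// load/retain/release/store sequence collapses into one runtime call:
//
//   call void @objc_storeStrong(i8** %old_ptr, i8* %new_value)
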
bool ObjCARCContract::tryToPeepholeInstruction(
    Function &F, Instruction *Inst, inst_iterator &Iter,
    SmallPtrSetImpl<Instruction *> &DependingInsts,
    SmallPtrSetImpl<const BasicBlock *> &Visited, bool &TailOkForStoreStrongs,
    const DenseMap<BasicBlock *, ColorVector> &BlockColors) {
  // Only these library routines return their argument. In particular,
  // objc_retainBlock does not necessarily return its argument.
  ARCInstKind Class = GetBasicARCInstKind(Inst);
  switch (Class) {
  case ARCInstKind::FusedRetainAutorelease:
  case ARCInstKind::FusedRetainAutoreleaseRV:
    return false;
  case ARCInstKind::Autorelease:
  case ARCInstKind::AutoreleaseRV:
    return contractAutorelease(F, Inst, Class, DependingInsts, Visited);
  case ARCInstKind::Retain:
    // Attempt to convert retains to retainrvs if they are next to function
    // calls.
    if (!optimizeRetainCall(F, Inst))
      return false;
    // If we succeed in our optimization, fall through.
    LLVM_FALLTHROUGH;
  case ARCInstKind::RetainRV:
  case ARCInstKind::ClaimRV: {
    // If we're compiling for a target which needs a special inline-asm
    // marker to do the return value optimization, insert it now.
    if (!RVInstMarker)
      return false;
    BasicBlock::iterator BBI = Inst->getIterator();
    BasicBlock *InstParent = Inst->getParent();

    // Step up to see if the call immediately precedes the RV call.
    // If it's an invoke, we have to cross a block boundary. And we have
    // to carefully dodge no-op instructions.
    do {
      if (BBI == InstParent->begin()) {
        BasicBlock *Pred = InstParent->getSinglePredecessor();
        if (!Pred)
          goto decline_rv_optimization;
        BBI = Pred->getTerminator()->getIterator();
        break;
      }
      --BBI;
    } while (IsNoopInstruction(&*BBI));

    if (&*BBI == GetArgRCIdentityRoot(Inst)) {
      LLVM_DEBUG(dbgs() << "Adding inline asm marker for the return value "
                           "optimization.\n");
      Changed = true;
      InlineAsm *IA =
          InlineAsm::get(FunctionType::get(Type::getVoidTy(Inst->getContext()),
                                           /*isVarArg=*/false),
                         RVInstMarker->getString(),
                         /*Constraints=*/"", /*hasSideEffects=*/true);

      createCallInst(IA, None, "", Inst, BlockColors);
    }
  decline_rv_optimization:
    return false;
  }
  case ARCInstKind::InitWeak: {
    // objc_initWeak(p, null) => *p = null
    CallInst *CI = cast<CallInst>(Inst);
    if (IsNullOrUndef(CI->getArgOperand(1))) {
      Value *Null = ConstantPointerNull::get(cast<PointerType>(CI->getType()));
      Changed = true;
      new StoreInst(Null, CI->getArgOperand(0), CI);

      LLVM_DEBUG(dbgs() << "OBJCARCContract: Old = " << *CI << "\n"
                        << "                 New = " << *Null << "\n");

      CI->replaceAllUsesWith(Null);
      CI->eraseFromParent();
    }
    return true;
  }
  case ARCInstKind::Release:
    // Try to form an objc_storeStrong from our release. If we fail, there is
    // nothing further to do below, so continue.
    tryToContractReleaseIntoStoreStrong(Inst, Iter, BlockColors);
    return true;
  case ARCInstKind::User:
    // Be conservative if the function has any alloca instructions.
    // Technically we only care about escaping alloca instructions,
    // but this is sufficient to handle some interesting cases.
    if (isa<AllocaInst>(Inst))
      TailOkForStoreStrongs = false;
    return true;
  case ARCInstKind::IntrinsicUser:
    // Remove calls to @llvm.objc.clang.arc.use(...).
    Inst->eraseFromParent();
    return true;
  default:
    return true;
  }
}

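// A sketch of the marker insertion in the RetainRV/ClaimRV case above
// (hypothetical IR; the marker string is target-dependent and comes from the
// module flag read in doInitialization below):
//
//   %call = tail call i8* @foo()
//   call void asm sideeffect "mov\09r7, r7", ""()   ; RV marker
//   %0 = tail call i8* @objc_retainAutoreleasedReturnValue(i8* %call)
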
//===----------------------------------------------------------------------===//
//                              Top Level Driver
//===----------------------------------------------------------------------===//

bool ObjCARCContract::runOnFunction(Function &F) {
  if (!EnableARCOpts)
    return false;

  // If nothing in the Module uses ARC, don't do anything.
  if (!Run)
    return false;

  Changed = false;
  AA = &getAnalysis<AAResultsWrapperPass>().getAAResults();
  DT = &getAnalysis<DominatorTreeWrapperPass>().getDomTree();

  PA.setAA(&getAnalysis<AAResultsWrapperPass>().getAAResults());

  DenseMap<BasicBlock *, ColorVector> BlockColors;
  if (F.hasPersonalityFn() &&
      isScopedEHPersonality(classifyEHPersonality(F.getPersonalityFn())))
    BlockColors = colorEHFunclets(F);

  LLVM_DEBUG(llvm::dbgs() << "**** ObjCARC Contract ****\n");

  // Track whether it's ok to mark objc_storeStrong calls with the "tail"
  // keyword. Be conservative if the function has variadic arguments.
  // Functions which "return twice" are also unsafe for the "tail" keyword,
  // because they call setjmp-like functions, which could need to return to an
  // earlier stack state.
  bool TailOkForStoreStrongs =
      !F.isVarArg() && !F.callsFunctionThatReturnsTwice();

  // For ObjC library calls which return their argument, replace uses of the
  // argument with uses of the call return value, if it dominates the use. This
  // reduces register pressure.
  SmallPtrSet<Instruction *, 4> DependingInstructions;
  SmallPtrSet<const BasicBlock *, 4> Visited;

  // Cache the basic block size.
  DenseMap<const BasicBlock *, unsigned> BBSizeMap;

  // A lambda that lazily computes the size of a basic block and determines
  // whether the size exceeds MaxBBSize.
  auto IsLargeBB = [&](const BasicBlock *BB) {
    unsigned BBSize;
    auto I = BBSizeMap.find(BB);

    if (I != BBSizeMap.end())
      BBSize = I->second;
    else
      BBSize = BBSizeMap[BB] = BB->size();

    return BBSize > MaxBBSize;
  };

  for (inst_iterator I = inst_begin(&F), E = inst_end(&F); I != E;) {
    Instruction *Inst = &*I++;

    LLVM_DEBUG(dbgs() << "Visiting: " << *Inst << "\n");

    // First try to peephole Inst. If there is nothing further we can do in
    // terms of undoing objc-arc-expand, process the next inst.
    if (tryToPeepholeInstruction(F, Inst, I, DependingInstructions, Visited,
                                 TailOkForStoreStrongs, BlockColors))
      continue;

    // Otherwise, try to undo objc-arc-expand.

    // Don't use GetArgRCIdentityRoot because we don't want to look through
    // bitcasts and such; to do the replacement, the argument must have type
    // i8*.

    // Function for replacing uses of Arg dominated by Inst.
    auto ReplaceArgUses = [Inst, IsLargeBB, this](Value *Arg) {
      // If we're compiling bugpointed code, don't get in trouble.
      if (!isa<Instruction>(Arg) && !isa<Argument>(Arg))
        return;

      // Look through the uses of the pointer.
      for (Value::use_iterator UI = Arg->use_begin(), UE = Arg->use_end();
           UI != UE; ) {
        // Increment UI now, because we may unlink its element.
        Use &U = *UI++;
        unsigned OperandNo = U.getOperandNo();

        // Don't replace the uses if Inst and the user belong to the same basic
        // block and the size of the basic block is large. We don't want to call
        // DominatorTree::dominates in that case. We can remove this check if we
        // can use OrderedBasicBlock to compute the dominance relation between
        // two instructions, but that's not currently possible since it doesn't
        // recompute the instruction ordering when new instructions are inserted
        // to the basic block.
        if (Inst->getParent() == cast<Instruction>(U.getUser())->getParent() &&
            IsLargeBB(Inst->getParent()))
          continue;

        // If the call's return value dominates a use of the call's argument
        // value, rewrite the use to use the return value. We check for
        // reachability here because an unreachable call is considered to
        // trivially dominate itself, which would lead us to rewriting its
        // argument in terms of its return value, which would lead to
        // infinite loops in GetArgRCIdentityRoot.
        if (!DT->isReachableFromEntry(U) || !DT->dominates(Inst, U))
          continue;

        Changed = true;
        Instruction *Replacement = Inst;
        Type *UseTy = U.get()->getType();
        if (PHINode *PHI = dyn_cast<PHINode>(U.getUser())) {
          // For PHI nodes, insert the bitcast in the predecessor block.
          unsigned ValNo = PHINode::getIncomingValueNumForOperand(OperandNo);
          BasicBlock *IncomingBB = PHI->getIncomingBlock(ValNo);
          if (Replacement->getType() != UseTy) {
            // A catchswitch is both a pad and a terminator, meaning a basic
            // block with a catchswitch has no insertion point. Keep going up
            // the dominator tree until we find a non-catchswitch.
            BasicBlock *InsertBB = IncomingBB;
            while (isa<CatchSwitchInst>(InsertBB->getFirstNonPHI())) {
              InsertBB = DT->getNode(InsertBB)->getIDom()->getBlock();
            }

            assert(DT->dominates(Inst, &InsertBB->back()) &&
                   "Invalid insertion point for bitcast");
            Replacement =
                new BitCastInst(Replacement, UseTy, "", &InsertBB->back());
          }

          // While we're here, rewrite all edges for this PHI, rather
          // than just one use at a time, to minimize the number of
          // bitcasts we emit.
          for (unsigned i = 0, e = PHI->getNumIncomingValues(); i != e; ++i)
            if (PHI->getIncomingBlock(i) == IncomingBB) {
              // Keep the UI iterator valid.
              if (UI != UE &&
                  &PHI->getOperandUse(
                      PHINode::getOperandNumForIncomingValue(i)) == &*UI)
                ++UI;
              PHI->setIncomingValue(i, Replacement);
            }
        } else {
          if (Replacement->getType() != UseTy)
            Replacement = new BitCastInst(Replacement, UseTy, "",
                                          cast<Instruction>(U.getUser()));
          U.set(Replacement);
        }
      }
    };

    Value *Arg = cast<CallInst>(Inst)->getArgOperand(0);
    Value *OrigArg = Arg;

    // TODO: Change this to a do-while.
    for (;;) {
      ReplaceArgUses(Arg);

      // If Arg is a no-op casted pointer, strip one level of casts and iterate.
      if (const BitCastInst *BI = dyn_cast<BitCastInst>(Arg))
        Arg = BI->getOperand(0);
      else if (isa<GEPOperator>(Arg) &&
               cast<GEPOperator>(Arg)->hasAllZeroIndices())
        Arg = cast<GEPOperator>(Arg)->getPointerOperand();
      else if (isa<GlobalAlias>(Arg) &&
               !cast<GlobalAlias>(Arg)->isInterposable())
        Arg = cast<GlobalAlias>(Arg)->getAliasee();
      else {
        // If Arg is a PHI node, get PHIs that are equivalent to it and replace
        // their uses.
        if (PHINode *PN = dyn_cast<PHINode>(Arg)) {
          SmallVector<Value *, 1> PHIList;
          getEquivalentPHIs(*PN, PHIList);
          for (Value *PHI : PHIList)
            ReplaceArgUses(PHI);
        }
        break;
      }
    }

    // Replace bitcast users of Arg that are dominated by Inst.
    SmallVector<BitCastInst *, 2> BitCastUsers;

    // Add all bitcast users of the function argument first.
    for (User *U : OrigArg->users())
      if (auto *BC = dyn_cast<BitCastInst>(U))
        BitCastUsers.push_back(BC);

    // Replace the bitcasts with the call return. Iterate until the list is
    // empty.
    while (!BitCastUsers.empty()) {
      auto *BC = BitCastUsers.pop_back_val();
      for (User *U : BC->users())
        if (auto *B = dyn_cast<BitCastInst>(U))
          BitCastUsers.push_back(B);

      ReplaceArgUses(BC);
    }
  }

  // If this function has no escaping allocas or suspicious vararg usage,
  // objc_storeStrong calls can be marked with the "tail" keyword.
  if (TailOkForStoreStrongs)
    for (CallInst *CI : StoreStrongCalls)
      CI->setTailCall();
  StoreStrongCalls.clear();

  return Changed;
}

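// A sketch of the use replacement done in the loop above (hypothetical IR):
// since these runtime calls return their argument, a dominated use of the
// argument can be rewritten to use the return value instead, reducing
// register pressure:
//
//   %0 = tail call i8* @objc_retain(i8* %p)
//   call void @use(i8* %p)
//     =>
//   %0 = tail call i8* @objc_retain(i8* %p)
//   call void @use(i8* %0)
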
//===----------------------------------------------------------------------===//
//                             Misc Pass Manager
//===----------------------------------------------------------------------===//

char ObjCARCContract::ID = 0;
INITIALIZE_PASS_BEGIN(ObjCARCContract, "objc-arc-contract",
                      "ObjC ARC contraction", false, false)
INITIALIZE_PASS_DEPENDENCY(AAResultsWrapperPass)
INITIALIZE_PASS_DEPENDENCY(DominatorTreeWrapperPass)
INITIALIZE_PASS_END(ObjCARCContract, "objc-arc-contract",
                    "ObjC ARC contraction", false, false)

void ObjCARCContract::getAnalysisUsage(AnalysisUsage &AU) const {
  AU.addRequired<AAResultsWrapperPass>();
  AU.addRequired<DominatorTreeWrapperPass>();
  AU.setPreservesCFG();
}

Pass *llvm::createObjCARCContractPass() { return new ObjCARCContract(); }

bool ObjCARCContract::doInitialization(Module &M) {
  // If nothing in the Module uses ARC, don't do anything.
  Run = ModuleHasARC(M);
  if (!Run)
    return false;

  EP.init(&M);

  // Initialize RVInstMarker.
  const char *MarkerKey = "clang.arc.retainAutoreleasedReturnValueMarker";
  RVInstMarker = dyn_cast_or_null<MDString>(M.getModuleFlag(MarkerKey));

  return false;
}
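
// The marker is published by the frontend as a module flag whose value is an
// MDString. A hypothetical module-level encoding (the exact marker string is
// target-dependent):
//
//   !llvm.module.flags = !{!0}
//   !0 = !{i32 1, !"clang.arc.retainAutoreleasedReturnValueMarker", !"mov\09r7, r7"}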