Coverage Report

Created: 2019-07-24 05:18

/Users/buildslave/jenkins/workspace/clang-stage2-coverage-R/llvm/lib/Analysis/CaptureTracking.cpp
//===--- CaptureTracking.cpp - Determine whether a pointer is captured ----===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This file contains routines that help determine which pointers are captured.
// A pointer value is captured if the function makes a copy of any part of the
// pointer that outlives the call.  Not being captured means, more or less, that
// the pointer is only dereferenced and not stored in a global.  Returning part
// of the pointer as the function return value may or may not count as capturing
// the pointer, depending on the context.
//
//===----------------------------------------------------------------------===//

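// ---------------------------------------------------------------------------
// Editorial sketch (not part of the original CaptureTracking.cpp): a minimal
// source-level illustration of the capture notion described above, assuming
// the usual lowering of C++ to LLVM IR.  In leak(), a copy of P outlives the
// call because it is stored to a global, so P is captured.  In readOnly(), P
// is only dereferenced, so it is not captured.  Whether passThrough() counts
// as a capture depends on the ReturnCaptures flag used by the queries below.
// ---------------------------------------------------------------------------
int *LeakedSlot = nullptr;
void leak(int *P) { LeakedSlot = P; }   // P is stored to a global: captured.
int readOnly(int *P) { return *P; }     // P is only dereferenced: not captured.
int *passThrough(int *P) { return P; }  // Captured only if returns count.
// ---------------------------------------------------------------------------
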
#include "llvm/Analysis/CaptureTracking.h"
19
#include "llvm/ADT/SmallSet.h"
20
#include "llvm/ADT/SmallVector.h"
21
#include "llvm/Analysis/AliasAnalysis.h"
22
#include "llvm/Analysis/CFG.h"
23
#include "llvm/Analysis/OrderedBasicBlock.h"
24
#include "llvm/Analysis/ValueTracking.h"
25
#include "llvm/IR/Constants.h"
26
#include "llvm/IR/Dominators.h"
27
#include "llvm/IR/Instructions.h"
28
#include "llvm/IR/IntrinsicInst.h"
29
30
using namespace llvm;
31
32
4.17M
CaptureTracker::~CaptureTracker() {}
33
34
78.6M
bool CaptureTracker::shouldExplore(const Use *U) { return true; }
35
36
namespace {
37
  struct SimpleCaptureTracker : public CaptureTracker {
38
    explicit SimpleCaptureTracker(bool ReturnCaptures)
39
3.57M
      : ReturnCaptures(ReturnCaptures), Captured(false) {}
40
41
886k
    void tooManyUses() override { Captured = true; }
42
43
2.54M
    bool captured(const Use *U) override {
44
2.54M
      if (isa<ReturnInst>(U->getUser()) && 
!ReturnCaptures69.5k
)
45
67.9k
        return false;
46
2.47M
47
2.47M
      Captured = true;
48
2.47M
      return true;
49
2.47M
    }
50
51
    bool ReturnCaptures;
52
53
    bool Captured;
54
  };

  /// Only find pointer captures which happen before the given instruction. Uses
  /// the dominator tree to determine whether one instruction is before another.
  /// Only supports the case where the Value is defined in the same basic block
  /// as the given instruction and the use.
  struct CapturesBefore : public CaptureTracker {

    CapturesBefore(bool ReturnCaptures, const Instruction *I,
                   const DominatorTree *DT, bool IncludeI,
                   OrderedBasicBlock *IC)
      : OrderedBB(IC), BeforeHere(I), DT(DT),
        ReturnCaptures(ReturnCaptures), IncludeI(IncludeI), Captured(false) {}

    void tooManyUses() override { Captured = true; }

    bool isSafeToPrune(Instruction *I) {
      BasicBlock *BB = I->getParent();
      // We explore this usage only if the usage can reach "BeforeHere".
      // If use is not reachable from entry, there is no need to explore.
      if (BeforeHere != I && !DT->isReachableFromEntry(BB))
        return true;

      // Compute the case where both instructions are inside the same basic
      // block. Since instructions in the same BB as BeforeHere are numbered in
      // 'OrderedBB', avoid using 'dominates' and 'isPotentiallyReachable'
      // which are very expensive for large basic blocks.
      if (BB == BeforeHere->getParent()) {
        // 'I' dominates 'BeforeHere' => not safe to prune.
        //
        // The value defined by an invoke dominates an instruction only
        // if it dominates every instruction in UseBB. A PHI is dominated only
        // if the instruction dominates every possible use in the UseBB. Since
        // UseBB == BB, avoid pruning.
        if (isa<InvokeInst>(BeforeHere) || isa<PHINode>(I) || I == BeforeHere)
          return false;
        if (!OrderedBB->dominates(BeforeHere, I))
          return false;

        // 'BeforeHere' comes before 'I', it's safe to prune if we also
        // guarantee that 'I' never reaches 'BeforeHere' through a back-edge or
        // by its successors, i.e., prune if:
        //
        //  (1) BB is an entry block or has no successors.
        //  (2) There's no path coming back through BB successors.
        if (BB == &BB->getParent()->getEntryBlock() ||
            !BB->getTerminator()->getNumSuccessors())
          return true;

        SmallVector<BasicBlock*, 32> Worklist;
        Worklist.append(succ_begin(BB), succ_end(BB));
        return !isPotentiallyReachableFromMany(Worklist, BB, nullptr, DT);
      }

      // If the value is defined in the same basic block as use and BeforeHere,
      // there is no need to explore the use if BeforeHere dominates use.
      // Check whether there is a path from I to BeforeHere.
      if (BeforeHere != I && DT->dominates(BeforeHere, I) &&
          !isPotentiallyReachable(I, BeforeHere, nullptr, DT))
        return true;

      return false;
    }

    bool shouldExplore(const Use *U) override {
      Instruction *I = cast<Instruction>(U->getUser());

      if (BeforeHere == I && !IncludeI)
        return false;

      if (isSafeToPrune(I))
        return false;

      return true;
    }

    bool captured(const Use *U) override {
      if (isa<ReturnInst>(U->getUser()) && !ReturnCaptures)
        return false;

      if (!shouldExplore(U))
        return false;

      Captured = true;
      return true;
    }

    OrderedBasicBlock *OrderedBB;
    const Instruction *BeforeHere;
    const DominatorTree *DT;

    bool ReturnCaptures;
    bool IncludeI;

    bool Captured;
  };
}
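
// ---------------------------------------------------------------------------
// Editorial sketch (not part of the original file): the CaptureTracker
// callbacks used above can also back custom queries.  A hedged example of a
// tracker that records every capturing use instead of stopping at the first
// one; the class is an illustrative assumption, not an LLVM API.
// ---------------------------------------------------------------------------
namespace {
  struct CollectingCaptureTracker : public CaptureTracker {
    SmallVector<const Use *, 8> CapturingUses;
    bool HitUseLimit = false;

    // Called when the use budget is exhausted; the result is then incomplete
    // and callers must conservatively treat the pointer as captured.
    void tooManyUses() override { HitUseLimit = true; }

    // Returning false tells the walk in PointerMayBeCaptured (below) to keep
    // exploring, so every capturing use is visited.
    bool captured(const Use *U) override {
      CapturingUses.push_back(U);
      return false;
    }
  };
} // namespace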

/// PointerMayBeCaptured - Return true if this pointer value may be captured
/// by the enclosing function (which is required to exist).  This routine can
/// be expensive, so consider caching the results.  The boolean ReturnCaptures
/// specifies whether returning the value (or part of it) from the function
/// counts as capturing it or not.  The boolean StoreCaptures specifies whether
/// storing the value (or part of it) into memory anywhere automatically
/// counts as capturing it or not.
bool llvm::PointerMayBeCaptured(const Value *V,
                                bool ReturnCaptures, bool StoreCaptures,
                                unsigned MaxUsesToExplore) {
  assert(!isa<GlobalValue>(V) &&
         "It doesn't make sense to ask whether a global is captured.");

  // TODO: If StoreCaptures is not true, we could do Fancy analysis
  // to determine whether this store is not actually an escape point.
  // In that case, BasicAliasAnalysis should be updated as well to
  // take advantage of this.
  (void)StoreCaptures;

  SimpleCaptureTracker SCT(ReturnCaptures);
  PointerMayBeCaptured(V, &SCT, MaxUsesToExplore);
  return SCT.Captured;
}
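
// Editorial sketch (not part of the original file): a typical client of the
// query above.  A transform that wants to promote or delete a local
// allocation might ask whether its address can escape; the helper name is a
// hypothetical example, not an LLVM function.
static bool localAddressMayEscape(const AllocaInst *AI) {
  // Count both returning the pointer and storing it anywhere as a capture,
  // and fall back to "captured" once the default use budget is exceeded.
  return PointerMayBeCaptured(AI, /*ReturnCaptures=*/true,
                              /*StoreCaptures=*/true);
}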

/// PointerMayBeCapturedBefore - Return true if this pointer value may be
/// captured by the enclosing function (which is required to exist). If a
/// DominatorTree is provided, only captures which happen before the given
/// instruction are considered. This routine can be expensive, so consider
/// caching the results.  The boolean ReturnCaptures specifies whether
/// returning the value (or part of it) from the function counts as capturing
/// it or not.  The boolean StoreCaptures specifies whether storing the value
/// (or part of it) into memory anywhere automatically counts as capturing it
/// or not. An ordered basic block \p OBB can be used in order to speed up
/// queries about relative order among instructions in the same basic block.
bool llvm::PointerMayBeCapturedBefore(const Value *V, bool ReturnCaptures,
                                      bool StoreCaptures, const Instruction *I,
                                      const DominatorTree *DT, bool IncludeI,
                                      OrderedBasicBlock *OBB,
                                      unsigned MaxUsesToExplore) {
  assert(!isa<GlobalValue>(V) &&
         "It doesn't make sense to ask whether a global is captured.");
  bool UseNewOBB = OBB == nullptr;

  if (!DT)
    return PointerMayBeCaptured(V, ReturnCaptures, StoreCaptures,
                                MaxUsesToExplore);
  if (UseNewOBB)
    OBB = new OrderedBasicBlock(I->getParent());

  // TODO: See comment in PointerMayBeCaptured regarding what could be done
  // with StoreCaptures.

  CapturesBefore CB(ReturnCaptures, I, DT, IncludeI, OBB);
  PointerMayBeCaptured(V, &CB, MaxUsesToExplore);

  if (UseNewOBB)
    delete OBB;
  return CB.Captured;
}
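
// Editorial sketch (not part of the original file): the narrower "captured
// before this instruction" form, e.g. when checking whether a call could
// observe a local object through some escape that happens earlier in the
// function.  The helper name is a hypothetical example.
static bool mayEscapeBefore(const Value *Ptr, const Instruction *Before,
                            const DominatorTree &DT) {
  // Only captures that can occur strictly before 'Before' are considered;
  // an OrderedBasicBlock is created internally when none is supplied.
  return PointerMayBeCapturedBefore(Ptr, /*ReturnCaptures=*/true,
                                    /*StoreCaptures=*/true, Before, &DT,
                                    /*IncludeI=*/false);
}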

void llvm::PointerMayBeCaptured(const Value *V, CaptureTracker *Tracker,
                                unsigned MaxUsesToExplore) {
  assert(V->getType()->isPointerTy() && "Capture is for pointers only!");
  SmallVector<const Use *, DefaultMaxUsesToExplore> Worklist;
  SmallSet<const Use *, DefaultMaxUsesToExplore> Visited;

  auto AddUses = [&](const Value *V) {
    unsigned Count = 0;
    for (const Use &U : V->uses()) {
      // If there are lots of uses, conservatively say that the value
      // is captured to avoid taking too much compile time.
      if (Count++ >= MaxUsesToExplore)
        return Tracker->tooManyUses();
      if (!Visited.insert(&U).second)
        continue;
      if (!Tracker->shouldExplore(&U))
        continue;
      Worklist.push_back(&U);
    }
  };
  AddUses(V);

  while (!Worklist.empty()) {
    const Use *U = Worklist.pop_back_val();
    Instruction *I = cast<Instruction>(U->getUser());
    V = U->get();

    switch (I->getOpcode()) {
    case Instruction::Call:
    case Instruction::Invoke: {
      auto *Call = cast<CallBase>(I);
      // Not captured if the callee is readonly, doesn't return a copy through
      // its return value and doesn't unwind (a readonly function can leak bits
      // by throwing an exception or not depending on the input value).
      if (Call->onlyReadsMemory() && Call->doesNotThrow() &&
          Call->getType()->isVoidTy())
        break;

      // The pointer is not captured if returned pointer is not captured.
      // NOTE: CaptureTracking users should not assume that only functions
      // marked with nocapture do not capture. This means that places like
      // GetUnderlyingObject in ValueTracking or DecomposeGEPExpression
      // in BasicAA also need to know about this property.
      if (isIntrinsicReturningPointerAliasingArgumentWithoutCapturing(Call)) {
        AddUses(Call);
        break;
      }

      // Volatile operations effectively capture the memory location that they
      // load and store to.
      if (auto *MI = dyn_cast<MemIntrinsic>(Call))
        if (MI->isVolatile())
          if (Tracker->captured(U))
            return;

      // Not captured if only passed via 'nocapture' arguments.  Note that
      // calling a function pointer does not in itself cause the pointer to
      // be captured.  This is a subtle point considering that (for example)
      // the callee might return its own address.  It is analogous to saying
      // that loading a value from a pointer does not cause the pointer to be
      // captured, even though the loaded value might be the pointer itself
      // (think of self-referential objects).
      for (auto IdxOpPair : enumerate(Call->data_ops())) {
        int Idx = IdxOpPair.index();
        Value *A = IdxOpPair.value();
        if (A == V && !Call->doesNotCapture(Idx))
          // The parameter is not marked 'nocapture' - captured.
          if (Tracker->captured(U))
            return;
      }
      break;
    }
    case Instruction::Load:
      // Volatile loads make the address observable.
      if (cast<LoadInst>(I)->isVolatile())
        if (Tracker->captured(U))
          return;
      break;
    case Instruction::VAArg:
      // "va-arg" from a pointer does not cause it to be captured.
      break;
    case Instruction::Store:
      // Stored the pointer - conservatively assume it may be captured.
      // Volatile stores make the address observable.
      if (V == I->getOperand(0) || cast<StoreInst>(I)->isVolatile())
        if (Tracker->captured(U))
          return;
      break;
    case Instruction::AtomicRMW: {
      // atomicrmw conceptually includes both a load and store from
      // the same location.
      // As with a store, the location being accessed is not captured,
      // but the value being stored is.
      // Volatile stores make the address observable.
      auto *ARMWI = cast<AtomicRMWInst>(I);
      if (ARMWI->getValOperand() == V || ARMWI->isVolatile())
        if (Tracker->captured(U))
          return;
      break;
    }
    case Instruction::AtomicCmpXchg: {
      // cmpxchg conceptually includes both a load and store from
      // the same location.
      // As with a store, the location being accessed is not captured,
      // but the value being stored is.
      // Volatile stores make the address observable.
      auto *ACXI = cast<AtomicCmpXchgInst>(I);
      if (ACXI->getCompareOperand() == V || ACXI->getNewValOperand() == V ||
          ACXI->isVolatile())
        if (Tracker->captured(U))
          return;
      break;
    }
    case Instruction::BitCast:
    case Instruction::GetElementPtr:
    case Instruction::PHI:
    case Instruction::Select:
    case Instruction::AddrSpaceCast:
      // The original value is not captured via this if the new value isn't.
      AddUses(I);
      break;
    case Instruction::ICmp: {
      if (auto *CPN = dyn_cast<ConstantPointerNull>(I->getOperand(1))) {
        // Don't count comparisons of a no-alias return value against null as
        // captures. This allows us to ignore comparisons of malloc results
        // with null, for example.
        if (CPN->getType()->getAddressSpace() == 0)
          if (isNoAliasCall(V->stripPointerCasts()))
            break;
        if (!I->getFunction()->nullPointerIsDefined()) {
          auto *O = I->getOperand(0)->stripPointerCastsSameRepresentation();
          // An inbounds GEP can either be a valid pointer (pointing into
          // or to the end of an allocation), or be null in the default
          // address space. So for an inbounds GEP there is no way to let
          // the pointer escape using clever GEP hacking because doing so
          // would make the pointer point outside of the allocated object
          // and thus make the GEP result a poison value.
          if (auto *GEP = dyn_cast<GetElementPtrInst>(O))
            if (GEP->isInBounds())
              break;
          // Comparing a dereferenceable_or_null argument against null
          // cannot lead to pointer escapes, because if it is not null it
          // must be a valid (in-bounds) pointer.
          bool CanBeNull;
          if (O->getPointerDereferenceableBytes(I->getModule()->getDataLayout(),
                                                CanBeNull))
            break;
        }
      }
      // Comparison against value stored in global variable. Given the pointer
      // does not escape, its value cannot be guessed and stored separately in a
      // global variable.
      unsigned OtherIndex = (I->getOperand(0) == V) ? 1 : 0;
      auto *LI = dyn_cast<LoadInst>(I->getOperand(OtherIndex));
      if (LI && isa<GlobalVariable>(LI->getPointerOperand()))
        break;
      // Otherwise, be conservative. There are crazy ways to capture pointers
      // using comparisons.
      if (Tracker->captured(U))
        return;
      break;
    }
    default:
      // Something else - be conservative and say it is captured.
      if (Tracker->captured(U))
        return;
      break;
    }
  }

  // All uses examined.
}
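
// ---------------------------------------------------------------------------
// Editorial sketch (not part of the original file): driving the worklist walk
// above with the CollectingCaptureTracker sketched earlier, to enumerate the
// uses that the analysis considers capturing.  Helper name and the use budget
// are illustrative assumptions.
// ---------------------------------------------------------------------------
static void collectCapturingUses(const Value *V,
                                 SmallVectorImpl<const Use *> &Out,
                                 unsigned MaxUses) {
  CollectingCaptureTracker CCT;
  PointerMayBeCaptured(V, &CCT, MaxUses);
  // If the use budget was hit the set is incomplete; conservative callers
  // should then treat V as captured by an unseen use.
  if (!CCT.HitUseLimit)
    Out.append(CCT.CapturingUses.begin(), CCT.CapturingUses.end());
}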