Coverage Report

Created: 2019-07-24 05:18

/Users/buildslave/jenkins/workspace/clang-stage2-coverage-R/llvm/lib/IR/SafepointIRVerifier.cpp
Line
Count
Source (jump to first uncovered line)
1
//===-- SafepointIRVerifier.cpp - Verify gc.statepoint invariants ---------===//
2
//
3
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
4
// See https://llvm.org/LICENSE.txt for license information.
5
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
6
//
7
//===----------------------------------------------------------------------===//
8
//
9
// Run a sanity check on the IR to ensure that Safepoints - if they've been
10
// inserted - were inserted correctly.  In particular, look for use of
11
// non-relocated values after a safepoint.  Its primary use is to check the
12
// correctness of safepoint insertion immediately after insertion, but it can
13
// also be used to verify that later transforms have not found a way to break
14
// safepoint semantics.
15
//
16
// In its current form, this verifier checks a property which is sufficient, but
17
// not necessary for correctness.  There are some cases where an unrelocated
18
// pointer can be used after the safepoint.  Consider this example:
19
//
20
//    a = ...
21
//    b = ...
22
//    (a',b') = safepoint(a,b)
23
//    c = cmp eq a b
24
//    br c, ..., ....
25
//
26
// Because it is valid to reorder 'c' above the safepoint, this is legal.  In
27
// practice, this is a somewhat uncommon transform, but CodeGenPrep does create
28
// idioms like this.  The verifier knows about these cases and avoids reporting
29
// false positives.
30
//
31
//===----------------------------------------------------------------------===//
32
33
#include "llvm/ADT/DenseSet.h"
34
#include "llvm/ADT/PostOrderIterator.h"
35
#include "llvm/ADT/SetOperations.h"
36
#include "llvm/ADT/SetVector.h"
37
#include "llvm/IR/BasicBlock.h"
38
#include "llvm/IR/Dominators.h"
39
#include "llvm/IR/Function.h"
40
#include "llvm/IR/Instructions.h"
41
#include "llvm/IR/Intrinsics.h"
42
#include "llvm/IR/IntrinsicInst.h"
43
#include "llvm/IR/Module.h"
44
#include "llvm/IR/Value.h"
45
#include "llvm/IR/SafepointIRVerifier.h"
46
#include "llvm/IR/Statepoint.h"
47
#include "llvm/Support/Debug.h"
48
#include "llvm/Support/CommandLine.h"
49
#include "llvm/Support/raw_ostream.h"
50
51
// Tag used by LLVM_DEBUG/dbgs() filtering (-debug-only=safepoint-ir-verifier).
#define DEBUG_TYPE "safepoint-ir-verifier"

using namespace llvm;

/// This option is used for writing test cases.  Instead of crashing the program
/// when verification fails, report a message to the console (for FileCheck
/// usage) and continue execution as if nothing happened.
// Defaults to off: normal runs abort on a verification failure.
static cl::opt<bool> PrintOnly("safepoint-ir-verifier-print-only",
                               cl::init(false));
60
61
namespace {
62
63
/// This CFG Deadness finds dead blocks and edges. Algorithm starts with a set
64
/// of blocks unreachable from entry then propagates deadness using foldable
65
/// conditional branches without modifying CFG. So GVN does but it changes CFG
66
/// by splitting critical edges. In most cases passes rely on SimplifyCFG to
67
/// clean up dead blocks, but in some cases, like verification or loop passes
68
/// it's not possible.
69
class CFGDeadness {
70
  const DominatorTree *DT = nullptr;
71
  SetVector<const BasicBlock *> DeadBlocks;
72
  SetVector<const Use *> DeadEdges; // Contains all dead edges from live blocks.
73
74
public:
75
  /// Return the edge that coresponds to the predecessor.
76
135
  static const Use& getEdge(const_pred_iterator &PredIt) {
77
135
    auto &PU = PredIt.getUse();
78
135
    return PU.getUser()->getOperandUse(PU.getOperandNo());
79
135
  }
80
81
  /// Return true if there is at least one live edge that corresponds to the
82
  /// basic block InBB listed in the phi node.
83
76
  bool hasLiveIncomingEdge(const PHINode *PN, const BasicBlock *InBB) const {
84
76
    assert(!isDeadBlock(InBB) && "block must be live");
85
76
    const BasicBlock* BB = PN->getParent();
86
76
    bool Listed = false;
87
113
    for (const_pred_iterator PredIt(BB), End(BB, true); PredIt != End; 
++PredIt37
) {
88
113
      if (InBB == *PredIt) {
89
76
        if (!isDeadEdge(&getEdge(PredIt)))
90
76
          return true;
91
0
        Listed = true;
92
0
      }
93
113
    }
94
76
    (void)Listed;
95
0
    assert(Listed && "basic block is not found among incoming blocks");
96
0
    return false;
97
76
  }
98
99
100
77
  bool isDeadBlock(const BasicBlock *BB) const {
101
77
    return DeadBlocks.count(BB);
102
77
  }
103
104
135
  bool isDeadEdge(const Use *U) const {
105
135
    assert(dyn_cast<Instruction>(U->getUser())->isTerminator() &&
106
135
           "edge must be operand of terminator");
107
135
    assert(cast_or_null<BasicBlock>(U->get()) &&
108
135
           "edge must refer to basic block");
109
135
    assert(!isDeadBlock(dyn_cast<Instruction>(U->getUser())->getParent()) &&
110
135
           "isDeadEdge() must be applied to edge from live block");
111
135
    return DeadEdges.count(U);
112
135
  }
113
114
0
  bool hasLiveIncomingEdges(const BasicBlock *BB) const {
115
0
    // Check if all incoming edges are dead.
116
0
    for (const_pred_iterator PredIt(BB), End(BB, true); PredIt != End; ++PredIt) {
117
0
      auto &PU = PredIt.getUse();
118
0
      const Use &U = PU.getUser()->getOperandUse(PU.getOperandNo());
119
0
      if (!isDeadBlock(*PredIt) && !isDeadEdge(&U))
120
0
        return true; // Found a live edge.
121
0
    }
122
0
    return false;
123
0
  }
124
125
34
  void processFunction(const Function &F, const DominatorTree &DT) {
126
34
    this->DT = &DT;
127
34
128
34
    // Start with all blocks unreachable from entry.
129
34
    for (const BasicBlock &BB : F)
130
77
      if (!DT.isReachableFromEntry(&BB))
131
2
        DeadBlocks.insert(&BB);
132
34
133
34
    // Top-down walk of the dominator tree
134
34
    ReversePostOrderTraversal<const Function *> RPOT(&F);
135
75
    for (const BasicBlock *BB : RPOT) {
136
75
      const Instruction *TI = BB->getTerminator();
137
75
      assert(TI && "blocks must be well formed");
138
75
139
75
      // For conditional branches, we can perform simple conditional propagation on
140
75
      // the condition value itself.
141
75
      const BranchInst *BI = dyn_cast<BranchInst>(TI);
142
75
      if (!BI || 
!BI->isConditional()42
||
!isa<Constant>(BI->getCondition())13
)
143
65
        continue;
144
10
145
10
      // If a branch has two identical successors, we cannot declare either dead.
146
10
      if (BI->getSuccessor(0) == BI->getSuccessor(1))
147
0
        continue;
148
10
149
10
      ConstantInt *Cond = dyn_cast<ConstantInt>(BI->getCondition());
150
10
      if (!Cond)
151
10
        continue;
152
0
153
0
      addDeadEdge(BI->getOperandUse(Cond->getZExtValue() ? 1 : 2));
154
0
    }
155
34
  }
156
157
protected:
158
0
  void addDeadBlock(const BasicBlock *BB) {
159
0
    SmallVector<const BasicBlock *, 4> NewDead;
160
0
    SmallSetVector<const BasicBlock *, 4> DF;
161
0
162
0
    NewDead.push_back(BB);
163
0
    while (!NewDead.empty()) {
164
0
      const BasicBlock *D = NewDead.pop_back_val();
165
0
      if (isDeadBlock(D))
166
0
        continue;
167
0
168
0
      // All blocks dominated by D are dead.
169
0
      SmallVector<BasicBlock *, 8> Dom;
170
0
      DT->getDescendants(const_cast<BasicBlock*>(D), Dom);
171
0
      // Do not need to mark all in and out edges dead
172
0
      // because BB is marked dead and this is enough
173
0
      // to run further.
174
0
      DeadBlocks.insert(Dom.begin(), Dom.end());
175
0
176
0
      // Figure out the dominance-frontier(D).
177
0
      for (BasicBlock *B : Dom)
178
0
        for (BasicBlock *S : successors(B))
179
0
          if (!isDeadBlock(S) && !hasLiveIncomingEdges(S))
180
0
            NewDead.push_back(S);
181
0
    }
182
0
  }
183
184
0
  void addDeadEdge(const Use &DeadEdge) {
185
0
    if (!DeadEdges.insert(&DeadEdge))
186
0
      return;
187
0
188
0
    BasicBlock *BB = cast_or_null<BasicBlock>(DeadEdge.get());
189
0
    if (hasLiveIncomingEdges(BB))
190
0
      return;
191
0
192
0
    addDeadBlock(BB);
193
0
  }
194
};
195
} // namespace
196
197
static void Verify(const Function &F, const DominatorTree &DT,
198
                   const CFGDeadness &CD);
199
200
namespace llvm {
/// New-pass-manager entry point: run the safepoint verifier over F.
/// The verifier never mutates the IR, so all analyses are preserved.
PreservedAnalyses SafepointIRVerifierPass::run(Function &F,
                                               FunctionAnalysisManager &AM) {
  const auto &DT = AM.getResult<DominatorTreeAnalysis>(F);
  // Compute dead blocks/edges first so the verifier can ignore them.
  CFGDeadness CD;
  CD.processFunction(F, DT);
  Verify(F, DT, CD);
  return PreservedAnalyses::all();
}
}
210
211
namespace {

/// Legacy-pass-manager wrapper around the safepoint verifier. Mirrors
/// SafepointIRVerifierPass::run above.
struct SafepointIRVerifier : public FunctionPass {
  static char ID; // Pass identification, replacement for typeid
  SafepointIRVerifier() : FunctionPass(ID) {
    initializeSafepointIRVerifierPass(*PassRegistry::getPassRegistry());
  }

  bool runOnFunction(Function &F) override {
    auto &DT = getAnalysis<DominatorTreeWrapperPass>().getDomTree();
    // Compute dead blocks/edges first so the verifier can ignore them.
    CFGDeadness CD;
    CD.processFunction(F, DT);
    Verify(F, DT, CD);
    return false; // no modifications
  }

  void getAnalysisUsage(AnalysisUsage &AU) const override {
    // Needs a dominator tree; verification itself changes nothing.
    AU.addRequiredID(DominatorTreeWrapperPass::ID);
    AU.setPreservesAll();
  }

  StringRef getPassName() const override { return "safepoint verifier"; }
};
} // namespace
235
236
0
/// Convenience entry point: run the safepoint verifier directly on F,
/// outside of any pass manager.
void llvm::verifySafepointIR(Function &F) {
  SafepointIRVerifier Verifier;
  Verifier.runOnFunction(F);
}
240
241
// Unique address used as the legacy pass identifier.
char SafepointIRVerifier::ID = 0;

/// Factory for the legacy-pass-manager verifier pass.
FunctionPass *llvm::createSafepointIRVerifierPass() {
  return new SafepointIRVerifier();
}

// Register the pass under -verify-safepoint-ir and declare its dependency on
// the dominator tree.
INITIALIZE_PASS_BEGIN(SafepointIRVerifier, "verify-safepoint-ir",
                      "Safepoint IR Verifier", false, false)
INITIALIZE_PASS_DEPENDENCY(DominatorTreeWrapperPass)
INITIALIZE_PASS_END(SafepointIRVerifier, "verify-safepoint-ir",
                    "Safepoint IR Verifier", false, false)
252
253
1.55k
/// Return true if T is a pointer into the GC-managed heap.
static bool isGCPointerType(Type *T) {
  // For the sake of this example GC, we arbitrarily pick addrspace(1) as our
  // GC managed heap.  We know that a pointer into this heap needs to be
  // updated and that no other pointer does.
  auto *PtrTy = dyn_cast<PointerType>(T);
  return PtrTy && PtrTy->getAddressSpace() == 1;
}
261
262
1.55k
/// Return true if Ty is, or transitively contains, a GC pointer type.
/// Looks through vectors, arrays and struct members.
static bool containsGCPtrType(Type *Ty) {
  // A GC pointer itself is the base case.
  if (isGCPointerType(Ty))
    return true;
  // Aggregates: recurse into element/member types. The dyn_casts are
  // mutually exclusive -- a Type has exactly one kind.
  if (auto *ArrTy = dyn_cast<ArrayType>(Ty))
    return containsGCPtrType(ArrTy->getElementType());
  if (auto *StructTy = dyn_cast<StructType>(Ty))
    return llvm::any_of(StructTy->elements(), containsGCPtrType);
  // Vectors are homogeneous, so checking the scalar type suffices.
  if (auto *VecTy = dyn_cast<VectorType>(Ty))
    return isGCPointerType(VecTy->getScalarType());
  return false;
}
273
274
// Debugging aid -- prints a [Begin, End) range of values.
275
template<typename IteratorTy>
276
static void PrintValueSet(raw_ostream &OS, IteratorTy Begin, IteratorTy End) {
277
  OS << "[ ";
278
  while (Begin != End) {
279
    OS << **Begin << " ";
280
    ++Begin;
281
  }
282
  OS << "]";
283
}
284
285
/// The verifier algorithm is phrased in terms of availability.  The set of
/// values "available" at a given point in the control flow graph is the set of
/// correctly relocated value at that point, and is a subset of the set of
/// definitions dominating that point.

using AvailableValueSet = DenseSet<const Value *>;

/// State we compute and track per basic block.
struct BasicBlockState {
  // Set of values available coming in, before the phi nodes
  AvailableValueSet AvailableIn;

  // Set of values available going out
  AvailableValueSet AvailableOut;

  // AvailableOut minus AvailableIn.
  // All elements are Instructions
  AvailableValueSet Contribution;

  // True if this block contains a safepoint and thus AvailableIn does not
  // contribute to AvailableOut.
  bool Cleared = false;
};
308
309
/// A given derived pointer can have multiple base pointers through phi/selects.
/// This type indicates when the base pointer is exclusively constant
/// (ExclusivelySomeConstant), and if that constant is proven to be exclusively
/// null, we record that as ExclusivelyNull. In all other cases, the BaseType is
/// NonConstant.
enum BaseType {
  NonConstant = 1, // Base pointer is not exclusively constant.
  ExclusivelyNull, // Every base pointer is the null constant.
  ExclusivelySomeConstant // Base pointers for a given derived pointer is from a
                          // set of constants, but they are not exclusively
                          // null.
};
321
322
/// Return the baseType for Val which states whether Val is exclusively
/// derived from constant/null, or not exclusively derived from constant.
/// Val is exclusively derived off a constant base when all operands of phi and
/// selects are derived off a constant base.
static enum BaseType getBaseType(const Value *Val) {

  SmallVector<const Value *, 32> Worklist;
  DenseSet<const Value *> Visited; // Guards against phi/select cycles.
  bool isExclusivelyDerivedFromNull = true;
  Worklist.push_back(Val);
  // Strip through all the bitcasts and geps to get base pointer. Also check for
  // the exclusive value when there can be multiple base pointers (through phis
  // or selects).
  while(!Worklist.empty()) {
    const Value *V = Worklist.pop_back_val();
    if (!Visited.insert(V).second)
      continue; // Already processed this value.

    // Casts and GEPs do not change the base; follow the stripped operand.
    if (const auto *CI = dyn_cast<CastInst>(V)) {
      Worklist.push_back(CI->stripPointerCasts());
      continue;
    }
    if (const auto *GEP = dyn_cast<GetElementPtrInst>(V)) {
      Worklist.push_back(GEP->getPointerOperand());
      continue;
    }
    // Push all the incoming values of phi node into the worklist for
    // processing.
    if (const auto *PN = dyn_cast<PHINode>(V)) {
      for (Value *InV: PN->incoming_values())
        Worklist.push_back(InV);
      continue;
    }
    if (const auto *SI = dyn_cast<SelectInst>(V)) {
      // Push in the true and false values
      Worklist.push_back(SI->getTrueValue());
      Worklist.push_back(SI->getFalseValue());
      continue;
    }
    if (isa<Constant>(V)) {
      // We found at least one base pointer which is non-null, so this derived
      // pointer is not exclusively derived from null.
      // (Constants are uniqued, so pointer identity against the null value
      // of this type is a valid null test.)
      if (V != Constant::getNullValue(V->getType()))
        isExclusivelyDerivedFromNull = false;
      // Continue processing the remaining values to make sure it's exclusively
      // constant.
      continue;
    }
    // At this point, we know that the base pointer is not exclusively
    // constant.
    return BaseType::NonConstant;
  }
  // Now, we know that the base pointer is exclusively constant, but we need to
  // differentiate between exclusive null constant and non-null constant.
  return isExclusivelyDerivedFromNull ? BaseType::ExclusivelyNull
                                      : BaseType::ExclusivelySomeConstant;
}
379
380
209
/// Shorthand: true when V has at least one non-constant base pointer and thus
/// needs relocation tracking.
static bool isNotExclusivelyConstantDerived(const Value *V) {
  return getBaseType(V) == BaseType::NonConstant;
}
383
384
namespace {
class InstructionVerifier;

/// Builds BasicBlockState for each BB of the function.
/// It can traverse function for verification and provides all required
/// information.
///
/// GC pointer may be in one of three states: relocated, unrelocated and
/// poisoned.
/// Relocated pointer may be used without any restrictions.
/// Unrelocated pointer cannot be dereferenced, passed as argument to any call
/// or returned. Unrelocated pointer may be safely compared against another
/// unrelocated pointer or against a pointer exclusively derived from null.
/// Poisoned pointers are produced when we somehow derive a pointer from both
/// relocated and unrelocated pointers (e.g. phi, select). These pointers may
/// be safely used in a very limited number of situations. Currently the only
/// way to use one is comparison against a constant exclusively derived from
/// null. All limitations arise due to their undefined state: these pointers
/// should be treated as relocated and unrelocated simultaneously.
/// Rules of deriving:
/// R + U = P - that's where the poisoned pointers come from
/// P + X = P
/// U + U = U
/// R + R = R
/// X + C = X
/// Where "+" - any operation that somehow derives a pointer, U - unrelocated,
/// R - relocated and P - poisoned, C - constant, X - U or R or P or C or
/// nothing (in case when "+" is unary operation).
/// Deriving of pointers by itself is always safe.
/// NOTE: when we are making decision on the status of instruction's result:
/// a) for phi we need to check status of each input *at the end of
///    corresponding predecessor BB*.
/// b) for other instructions we need to check status of each input *at the
///    current point*.
///
/// FIXME: This works fairly well except one case
///     bb1:
///     p = *some GC-ptr def*
///     p1 = gep p, offset
///         /     |
///        /      |
///    bb2:       |
///    safepoint  |
///        \      |
///         \     |
///      bb3:
///      p2 = phi [p, bb2] [p1, bb1]
///      p3 = phi [p, bb2] [p, bb1]
///      here p and p1 is unrelocated
///           p2 and p3 is poisoned (though they shouldn't be)
///
/// This leads to some weird results:
///      cmp eq p, p2 - illegal instruction (false-positive)
///      cmp eq p1, p2 - illegal instruction (false-positive)
///      cmp eq p, p3 - illegal instruction (false-positive)
///      cmp eq p, p1 - ok
/// To fix this we need to introduce a conception of generations and be able to
/// check if two values belong to one generation or not. This way p2 will be
/// considered to be unrelocated and no false alarm will happen.
class GCPtrTracker {
  const Function &F;
  const CFGDeadness &CD;
  // Arena for BasicBlockState objects; owned by the tracker's lifetime.
  SpecificBumpPtrAllocator<BasicBlockState> BSAllocator;
  // Per-block state; only live (reachable, non-dead) blocks are mapped.
  DenseMap<const BasicBlock *, BasicBlockState *> BlockMap;
  // This set contains defs of unrelocated pointers that are proved to be legal
  // and don't need verification.
  DenseSet<const Instruction *> ValidUnrelocatedDefs;
  // This set contains poisoned defs. They can be safely ignored during
  // verification too.
  DenseSet<const Value *> PoisonedDefs;

public:
  GCPtrTracker(const Function &F, const DominatorTree &DT,
               const CFGDeadness &CD);

  // Delegates to the CFG deadness analysis.
  bool hasLiveIncomingEdge(const PHINode *PN, const BasicBlock *InBB) const {
    return CD.hasLiveIncomingEdge(PN, InBB);
  }

  BasicBlockState *getBasicBlockState(const BasicBlock *BB);
  const BasicBlockState *getBasicBlockState(const BasicBlock *BB) const;

  bool isValuePoisoned(const Value *V) const { return PoisonedDefs.count(V); }

  /// Traverse each BB of the function and call
  /// InstructionVerifier::verifyInstruction for each possibly invalid
  /// instruction.
  /// It destructively modifies GCPtrTracker so it's passed via rvalue reference
  /// in order to prohibit further usages of GCPtrTracker as it'll be in
  /// inconsistent state.
  static void verifyFunction(GCPtrTracker &&Tracker,
                             InstructionVerifier &Verifier);

  /// Returns true for reachable and live blocks.
  bool isMapped(const BasicBlock *BB) const {
    return BlockMap.find(BB) != BlockMap.end();
  }

private:
  /// Returns true if the instruction may be safely skipped during verification.
  bool instructionMayBeSkipped(const Instruction *I) const;

  /// Iterates over all BBs from BlockMap and recalculates AvailableIn/Out for
  /// each of them until it converges.
  void recalculateBBsStates();

  /// Remove from Contribution all defs that legally produce unrelocated
  /// pointers and saves them to ValidUnrelocatedDefs.
  /// Though Contribution should belong to BBS it is passed separately with
  /// different const-modifier in order to emphasize (and guarantee) that only
  /// Contribution will be changed.
  /// Returns true if Contribution was changed otherwise false.
  bool removeValidUnrelocatedDefs(const BasicBlock *BB,
                                  const BasicBlockState *BBS,
                                  AvailableValueSet &Contribution);

  /// Gather all the definitions dominating the start of BB into Result. This is
  /// simply the defs introduced by every dominating basic block and the
  /// function arguments.
  void gatherDominatingDefs(const BasicBlock *BB, AvailableValueSet &Result,
                            const DominatorTree &DT);

  /// Compute the AvailableOut set for BB, based on the BasicBlockState BBS,
  /// which is the BasicBlockState for BB.
  /// ContributionChanged is set when the verifier runs for the first time
  /// (in this case Contribution was changed from 'empty' to its initial state)
  /// or when Contribution of this BB was changed since last computation.
  static void transferBlock(const BasicBlock *BB, BasicBlockState &BBS,
                            bool ContributionChanged);

  /// Model the effect of an instruction on the set of available values.
  static void transferInstruction(const Instruction &I, bool &Cleared,
                                  AvailableValueSet &Available);
};

/// It is a visitor for GCPtrTracker::verifyFunction. It decides if the
/// instruction (which uses heap reference) is legal or not, given our safepoint
/// semantics.
class InstructionVerifier {
  // Sticky flag: set on the first invalid use and never cleared.
  bool AnyInvalidUses = false;

public:
  void verifyInstruction(const GCPtrTracker *Tracker, const Instruction &I,
                         const AvailableValueSet &AvailableSet);

  bool hasAnyInvalidUses() const { return AnyInvalidUses; }

private:
  void reportInvalidUse(const Value &V, const Instruction &I);
};
} // end anonymous namespace
535
536
/// Build per-block state for every live block of F and run the availability
/// data-flow to a fixed point.
GCPtrTracker::GCPtrTracker(const Function &F, const DominatorTree &DT,
                           const CFGDeadness &CD) : F(F), CD(CD) {
  // Calculate Contribution of each live BB.
  // Allocate BB states for live blocks.
  for (const BasicBlock &BB : F)
    if (!CD.isDeadBlock(&BB)) {
      BasicBlockState *BBS = new (BSAllocator.Allocate()) BasicBlockState;
      for (const auto &I : BB)
        transferInstruction(I, BBS->Cleared, BBS->Contribution);
      BlockMap[&BB] = BBS;
    }

  // Initialize AvailableIn/Out sets of each BB using only information about
  // dominating BBs.
  for (auto &BBI : BlockMap) {
    gatherDominatingDefs(BBI.first, BBI.second->AvailableIn, DT);
    transferBlock(BBI.first, *BBI.second, true);
  }

  // Simulate the flow of defs through the CFG and recalculate AvailableIn/Out
  // sets of each BB until it converges. If any def is proved to be an
  // unrelocated pointer, it will be removed from all BBSs.
  recalculateBBsStates();
}
560
561
282
/// Look up the state for BB; returns nullptr for dead/unmapped blocks.
BasicBlockState *GCPtrTracker::getBasicBlockState(const BasicBlock *BB) {
  auto It = BlockMap.find(BB);
  if (It == BlockMap.end())
    return nullptr;
  return It->second;
}
565
566
/// Const overload; delegates to the non-const version via const_cast, which is
/// safe because the lookup itself does not mutate the tracker.
const BasicBlockState *GCPtrTracker::getBasicBlockState(
    const BasicBlock *BB) const {
  return const_cast<GCPtrTracker *>(this)->getBasicBlockState(BB);
}
570
571
234
bool GCPtrTracker::instructionMayBeSkipped(const Instruction *I) const {
572
234
  // Poisoned defs are skipped since they are always safe by itself by
573
234
  // definition (for details see comment to this class).
574
234
  return ValidUnrelocatedDefs.count(I) || 
PoisonedDefs.count(I)211
;
575
234
}
576
577
/// Walk the function in RPO, feeding every non-skippable instruction to the
/// verifier while incrementally updating the set of available (relocated)
/// values. Takes the tracker by rvalue reference because AvailableIn sets are
/// destroyed in the process.
void GCPtrTracker::verifyFunction(GCPtrTracker &&Tracker,
                                  InstructionVerifier &Verifier) {
  // We need RPO here to a) report always the first error b) report errors in
  // same order from run to run.
  ReversePostOrderTraversal<const Function *> RPOT(&Tracker.F);
  for (const BasicBlock *BB : RPOT) {
    BasicBlockState *BBS = Tracker.getBasicBlockState(BB);
    if (!BBS)
      continue; // Dead block: nothing to verify.

    // We destructively modify AvailableIn as we traverse the block instruction
    // by instruction.
    AvailableValueSet &AvailableSet = BBS->AvailableIn;
    for (const Instruction &I : *BB) {
      if (Tracker.instructionMayBeSkipped(&I))
        continue; // This instruction shouldn't be added to AvailableSet.

      Verifier.verifyInstruction(&Tracker, I, AvailableSet);

      // Model the effect of current instruction on AvailableSet to keep the set
      // relevant at each point of BB.
      bool Cleared = false;
      transferInstruction(I, Cleared, AvailableSet);
      (void)Cleared;
    }
  }
}
604
605
34
/// Worklist-driven data-flow: repeatedly shrink each block's AvailableIn (the
/// intersection of live predecessors' AvailableOut) and AvailableOut until
/// nothing changes. The sets only ever decrease, which guarantees termination.
void GCPtrTracker::recalculateBBsStates() {
  SetVector<const BasicBlock *> Worklist;
  // TODO: This order is suboptimal, it's better to replace it with priority
  // queue where priority is RPO number of BB.
  for (auto &BBI : BlockMap)
    Worklist.insert(BBI.first);

  // This loop iterates the AvailableIn/Out sets until it converges.
  // The AvailableIn and AvailableOut sets decrease as we iterate.
  while (!Worklist.empty()) {
    const BasicBlock *BB = Worklist.pop_back_val();
    BasicBlockState *BBS = getBasicBlockState(BB);
    if (!BBS)
      continue; // Ignore dead successors.

    size_t OldInCount = BBS->AvailableIn.size();
    // AvailableIn = intersection of AvailableOut over live incoming edges.
    for (const_pred_iterator PredIt(BB), End(BB, true); PredIt != End; ++PredIt) {
      const BasicBlock *PBB = *PredIt;
      BasicBlockState *PBBS = getBasicBlockState(PBB);
      if (PBBS && !CD.isDeadEdge(&CFGDeadness::getEdge(PredIt)))
        set_intersect(BBS->AvailableIn, PBBS->AvailableOut);
    }

    assert(OldInCount >= BBS->AvailableIn.size() && "invariant!");

    bool InputsChanged = OldInCount != BBS->AvailableIn.size();
    bool ContributionChanged =
        removeValidUnrelocatedDefs(BB, BBS, BBS->Contribution);
    if (!InputsChanged && !ContributionChanged)
      continue; // Fixed point reached for this block.

    // Recompute AvailableOut; if it shrank, successors must be revisited.
    size_t OldOutCount = BBS->AvailableOut.size();
    transferBlock(BB, *BBS, ContributionChanged);
    if (OldOutCount != BBS->AvailableOut.size()) {
      assert(OldOutCount > BBS->AvailableOut.size() && "invariant!");
      Worklist.insert(succ_begin(BB), succ_end(BB));
    }
  }
}
644
645
/// Classify each def of BB as a legal unrelocated def, a poisoned def, or an
/// ordinary def, and strip the first two kinds out of Contribution so they
/// never appear in any availability set. Returns true if Contribution changed.
bool GCPtrTracker::removeValidUnrelocatedDefs(const BasicBlock *BB,
                                              const BasicBlockState *BBS,
                                              AvailableValueSet &Contribution) {
  assert(&BBS->Contribution == &Contribution &&
         "Passed Contribution should be from the passed BasicBlockState!");
  // Local copy: simulates availability at the current point inside BB without
  // disturbing the block's stored AvailableIn.
  AvailableValueSet AvailableSet = BBS->AvailableIn;
  bool ContributionChanged = false;
  // For explanation why instructions are processed this way see
  // "Rules of deriving" in the comment to this class.
  for (const Instruction &I : *BB) {
    bool ValidUnrelocatedPointerDef = false;
    bool PoisonedPointerDef = false;
    // TODO: `select` instructions should be handled here too.
    if (const PHINode *PN = dyn_cast<PHINode>(&I)) {
      if (containsGCPtrType(PN->getType())) {
        // If both flags end up true, the output is poisoned (R + U = P).
        bool HasRelocatedInputs = false;
        bool HasUnrelocatedInputs = false;
        for (unsigned i = 0, e = PN->getNumIncomingValues(); i != e; ++i) {
          const BasicBlock *InBB = PN->getIncomingBlock(i);
          if (!isMapped(InBB) ||
              !CD.hasLiveIncomingEdge(PN, InBB))
            continue; // Skip dead block or dead edge.

          const Value *InValue = PN->getIncomingValue(i);

          if (isNotExclusivelyConstantDerived(InValue)) {
            if (isValuePoisoned(InValue)) {
              // If any of inputs is poisoned, output is always poisoned too.
              HasRelocatedInputs = true;
              HasUnrelocatedInputs = true;
              break;
            }
            // Phi inputs are judged at the *end* of the predecessor block.
            if (BlockMap[InBB]->AvailableOut.count(InValue))
              HasRelocatedInputs = true;
            else
              HasUnrelocatedInputs = true;
          }
        }
        if (HasUnrelocatedInputs) {
          if (HasRelocatedInputs)
            PoisonedPointerDef = true;
          else
            ValidUnrelocatedPointerDef = true;
        }
      }
    } else if ((isa<GetElementPtrInst>(I) || isa<BitCastInst>(I)) &&
               containsGCPtrType(I.getType())) {
      // GEP/bitcast of unrelocated pointer is legal by itself but this def
      // shouldn't appear in any AvailableSet.
      for (const Value *V : I.operands())
        if (containsGCPtrType(V->getType()) &&
            isNotExclusivelyConstantDerived(V) && !AvailableSet.count(V)) {
          if (isValuePoisoned(V))
            PoisonedPointerDef = true; // P + X = P
          else
            ValidUnrelocatedPointerDef = true; // U + X = U
          break;
        }
    }
    assert(!(ValidUnrelocatedPointerDef && PoisonedPointerDef) &&
           "Value cannot be both unrelocated and poisoned!");
    if (ValidUnrelocatedPointerDef) {
      // Remove def of unrelocated pointer from Contribution of this BB and
      // trigger update of all its successors.
      Contribution.erase(&I);
      PoisonedDefs.erase(&I);
      ValidUnrelocatedDefs.insert(&I);
      LLVM_DEBUG(dbgs() << "Removing urelocated " << I
                        << " from Contribution of " << BB->getName() << "\n");
      ContributionChanged = true;
    } else if (PoisonedPointerDef) {
      // Mark pointer as poisoned, remove its def from Contribution and trigger
      // update of all successors.
      Contribution.erase(&I);
      PoisonedDefs.insert(&I);
      LLVM_DEBUG(dbgs() << "Removing poisoned " << I << " from Contribution of "
                        << BB->getName() << "\n");
      ContributionChanged = true;
    } else {
      // Ordinary def: advance the simulated availability point.
      bool Cleared = false;
      transferInstruction(I, Cleared, AvailableSet);
      (void)Cleared;
    }
  }
  return ContributionChanged;
}
732
733
void GCPtrTracker::gatherDominatingDefs(const BasicBlock *BB,
734
                                        AvailableValueSet &Result,
735
75
                                        const DominatorTree &DT) {
736
75
  DomTreeNode *DTN = DT[const_cast<BasicBlock *>(BB)];
737
75
738
75
  assert(DTN && "Unreachable blocks are ignored");
739
117
  while (DTN->getIDom()) {
740
45
    DTN = DTN->getIDom();
741
45
    auto BBS = getBasicBlockState(DTN->getBlock());
742
45
    assert(BBS && "immediate dominator cannot be dead for a live block");
743
45
    const auto &Defs = BBS->Contribution;
744
45
    Result.insert(Defs.begin(), Defs.end());
745
45
    // If this block is 'Cleared', then nothing LiveIn to this block can be
746
45
    // available after this block completes.  Note: This turns out to be
747
45
    // really important for reducing memory consuption of the initial available
748
45
    // sets and thus peak memory usage by this verifier.
749
45
    if (BBS->Cleared)
750
3
      return;
751
45
  }
752
75
753
75
  
for (const Argument &A : BB->getParent()->args())72
754
121
    if (containsGCPtrType(A.getType()))
755
79
      Result.insert(&A);
756
72
}
757
758
void GCPtrTracker::transferBlock(const BasicBlock *BB, BasicBlockState &BBS,
759
97
                                 bool ContributionChanged) {
760
97
  const AvailableValueSet &AvailableIn = BBS.AvailableIn;
761
97
  AvailableValueSet &AvailableOut = BBS.AvailableOut;
762
97
763
97
  if (BBS.Cleared) {
764
44
    // AvailableOut will change only when Contribution changed.
765
44
    if (ContributionChanged)
766
44
      AvailableOut = BBS.Contribution;
767
53
  } else {
768
53
    // Otherwise, we need to reduce the AvailableOut set by things which are no
769
53
    // longer in our AvailableIn
770
53
    AvailableValueSet Temp = BBS.Contribution;
771
53
    set_union(Temp, AvailableIn);
772
53
    AvailableOut = std::move(Temp);
773
53
  }
774
97
775
97
  LLVM_DEBUG(dbgs() << "Transfered block " << BB->getName() << " from ";
776
97
             PrintValueSet(dbgs(), AvailableIn.begin(), AvailableIn.end());
777
97
             dbgs() << " to ";
778
97
             PrintValueSet(dbgs(), AvailableOut.begin(), AvailableOut.end());
779
97
             dbgs() << "\n";);
780
97
}
781
782
void GCPtrTracker::transferInstruction(const Instruction &I, bool &Cleared,
783
657
                                       AvailableValueSet &Available) {
784
657
  if (isStatepoint(I)) {
785
112
    Cleared = true;
786
112
    Available.clear();
787
545
  } else if (containsGCPtrType(I.getType()))
788
242
    Available.insert(&I);
789
657
}
790
791
// Check one instruction's operands against the available-value set computed
// for its program point, reporting any use of an unrelocated GC pointer that
// is not one of the known-benign idioms (phi nodes merged over live edges,
// and compares between unrelocated pointers).
void InstructionVerifier::verifyInstruction(
    const GCPtrTracker *Tracker, const Instruction &I,
    const AvailableValueSet &AvailableSet) {
  if (const PHINode *PN = dyn_cast<PHINode>(&I)) {
    // PHI incoming values are validated against the AvailableOut set of the
    // predecessor they flow from, not against this block's AvailableSet.
    if (containsGCPtrType(PN->getType()))
      for (unsigned i = 0, e = PN->getNumIncomingValues(); i != e; ++i) {
        const BasicBlock *InBB = PN->getIncomingBlock(i);
        const BasicBlockState *InBBS = Tracker->getBasicBlockState(InBB);
        if (!InBBS ||
            !Tracker->hasLiveIncomingEdge(PN, InBB))
          continue; // Skip dead block or dead edge.

        const Value *InValue = PN->getIncomingValue(i);

        if (isNotExclusivelyConstantDerived(InValue) &&
            !InBBS->AvailableOut.count(InValue))
          reportInvalidUse(*InValue, *PN);
      }
  } else if (isa<CmpInst>(I) &&
             containsGCPtrType(I.getOperand(0)->getType())) {
    // Compares are special: comparing two unrelocated pointers can be legal
    // (the compare could have been reordered above the safepoint), so apply
    // the finer-grained rules encoded in the lambda below.
    Value *LHS = I.getOperand(0), *RHS = I.getOperand(1);
    enum BaseType baseTyLHS = getBaseType(LHS),
                  baseTyRHS = getBaseType(RHS);

    // Returns true if LHS and RHS are unrelocated pointers and they are
    // valid unrelocated uses.
    auto hasValidUnrelocatedUse = [&AvailableSet, Tracker, baseTyLHS, baseTyRHS,
                                   &LHS, &RHS] () {
        // A cmp instruction has valid unrelocated pointer operands only if
        // both operands are unrelocated pointers.
        // In the comparison between two pointers, if one is an unrelocated
        // use, the other *should be* an unrelocated use, for this
        // instruction to contain valid unrelocated uses. This unrelocated
        // use can be a null constant as well, or another unrelocated
        // pointer.
        if (AvailableSet.count(LHS) || AvailableSet.count(RHS))
          return false;
        // Constant pointers (that are not exclusively null) may have
        // meaning in different VMs, so we cannot reorder the compare
        // against constant pointers before the safepoint. In other words,
        // comparison of an unrelocated use against a non-null constant
        // maybe invalid.
        if ((baseTyLHS == BaseType::ExclusivelySomeConstant &&
             baseTyRHS == BaseType::NonConstant) ||
            (baseTyLHS == BaseType::NonConstant &&
             baseTyRHS == BaseType::ExclusivelySomeConstant))
          return false;

        // If one of pointers is poisoned and other is not exclusively derived
        // from null it is an invalid expression: it produces poisoned result
        // and unless we want to track all defs (not only gc pointers) the only
        // option is to prohibit such instructions.
        if ((Tracker->isValuePoisoned(LHS) && baseTyRHS != ExclusivelyNull) ||
            (Tracker->isValuePoisoned(RHS) && baseTyLHS != ExclusivelyNull))
            return false;

        // All other cases are valid cases enumerated below:
        // 1. Comparison between an exclusively derived null pointer and a
        // constant base pointer.
        // 2. Comparison between an exclusively derived null pointer and a
        // non-constant unrelocated base pointer.
        // 3. Comparison between 2 unrelocated pointers.
        // 4. Comparison between a pointer exclusively derived from null and a
        // non-constant poisoned pointer.
        return true;
    };
    if (!hasValidUnrelocatedUse()) {
      // Print out all non-constant derived pointers that are unrelocated
      // uses, which are invalid.
      if (baseTyLHS == BaseType::NonConstant && !AvailableSet.count(LHS))
        reportInvalidUse(*LHS, I);
      if (baseTyRHS == BaseType::NonConstant && !AvailableSet.count(RHS))
        reportInvalidUse(*RHS, I);
    }
  } else {
    // Generic case: every non-constant-derived GC-pointer operand must be
    // present in the available set at this point.
    for (const Value *V : I.operands())
      if (containsGCPtrType(V->getType()) &&
          isNotExclusivelyConstantDerived(V) && !AvailableSet.count(V))
        reportInvalidUse(*V, I);
  }
}
872
873
void InstructionVerifier::reportInvalidUse(const Value &V,
874
18
                                           const Instruction &I) {
875
18
  errs() << "Illegal use of unrelocated value found!\n";
876
18
  errs() << "Def: " << V << "\n";
877
18
  errs() << "Use: " << I << "\n";
878
18
  if (!PrintOnly)
879
0
    abort();
880
18
  AnyInvalidUses = true;
881
18
}
882
883
static void Verify(const Function &F, const DominatorTree &DT,
884
34
                   const CFGDeadness &CD) {
885
34
  LLVM_DEBUG(dbgs() << "Verifying gc pointers in function: " << F.getName()
886
34
                    << "\n");
887
34
  if (PrintOnly)
888
34
    dbgs() << "Verifying gc pointers in function: " << F.getName() << "\n";
889
34
890
34
  GCPtrTracker Tracker(F, DT, CD);
891
34
892
34
  // We now have all the information we need to decide if the use of a heap
893
34
  // reference is legal or not, given our safepoint semantics.
894
34
895
34
  InstructionVerifier Verifier;
896
34
  GCPtrTracker::verifyFunction(std::move(Tracker), Verifier);
897
34
898
34
  if (PrintOnly && !Verifier.hasAnyInvalidUses()) {
899
18
    dbgs() << "No illegal uses found by SafepointIRVerifier in: " << F.getName()
900
18
           << "\n";
901
18
  }
902
34
}