Coverage Report

Created: 2019-07-24 05:18

/Users/buildslave/jenkins/workspace/clang-stage2-coverage-R/llvm/lib/Analysis/TypeMetadataUtils.cpp

//===- TypeMetadataUtils.cpp - Utilities related to type metadata ---------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This file contains functions that make it easier to manipulate type metadata
// for devirtualization.
//
//===----------------------------------------------------------------------===//
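
// As an illustrative sketch (not part of the original file), the IR shape
// these utilities recognize is roughly the following; all value names and
// the type identifier are hypothetical:
//
//   %vtable = load i8*, i8** %vtable.addr
//   %p = call i1 @llvm.type.test(i8* %vtable, metadata !"_ZTS1A")
//   call void @llvm.assume(i1 %p)
//   %slot = getelementptr i8, i8* %vtable, i64 8
//   %fptr.addr = bitcast i8* %slot to i32 (i8*)**
//   %fptr = load i32 (i8*)*, i32 (i8*)** %fptr.addr
//   %r = call i32 %fptr(i8* %obj)   ; candidate virtual call at offset 8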

#include "llvm/Analysis/TypeMetadataUtils.h"
#include "llvm/IR/Constants.h"
#include "llvm/IR/Dominators.h"
#include "llvm/IR/Intrinsics.h"
#include "llvm/IR/Module.h"

using namespace llvm;

// Search for virtual calls that call FPtr and add them to DevirtCalls.
static void
findCallsAtConstantOffset(SmallVectorImpl<DevirtCallSite> &DevirtCalls,
                          bool *HasNonCallUses, Value *FPtr, uint64_t Offset,
                          const CallInst *CI, DominatorTree &DT) {
  for (const Use &U : FPtr->uses()) {
    Instruction *User = cast<Instruction>(U.getUser());
    // Ignore this instruction if it is not dominated by the type intrinsic
    // being analyzed. Otherwise we may transform a call sharing the same
    // vtable pointer incorrectly. Specifically, this situation can arise
    // after indirect call promotion and inlining, where we may have uses
    // of the vtable pointer guarded by a function pointer check, and a fallback
    // indirect call.
    if (!DT.dominates(CI, User))
      continue;
    if (isa<BitCastInst>(User)) {
      findCallsAtConstantOffset(DevirtCalls, HasNonCallUses, User, Offset, CI,
                                DT);
    } else if (auto CI = dyn_cast<CallInst>(User)) {
      DevirtCalls.push_back({Offset, CI});
    } else if (auto II = dyn_cast<InvokeInst>(User)) {
      DevirtCalls.push_back({Offset, II});
    } else if (HasNonCallUses) {
      *HasNonCallUses = true;
    }
  }
}
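
// Illustrative sketch of the hazard the dominance check above guards
// against (hypothetical IR; names invented). After indirect call promotion
// and inlining, one function pointer may feed both a comparison against a
// known target and a fallback indirect call, so only uses dominated by the
// type intrinsic under analysis may be rewritten:
//
//   %fptr = load i32 (i8*)*, i32 (i8*)** %slot
//   %ok = icmp eq i32 (i8*)* %fptr, @known.target
//   br i1 %ok, label %direct, label %fallback
// fallback:
//   %r = call i32 %fptr(i8* %obj)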

// Search for virtual calls that load from VPtr and add them to DevirtCalls.
static void findLoadCallsAtConstantOffset(
    const Module *M, SmallVectorImpl<DevirtCallSite> &DevirtCalls, Value *VPtr,
    int64_t Offset, const CallInst *CI, DominatorTree &DT) {
  for (const Use &U : VPtr->uses()) {
    Value *User = U.getUser();
    if (isa<BitCastInst>(User)) {
      findLoadCallsAtConstantOffset(M, DevirtCalls, User, Offset, CI, DT);
    } else if (isa<LoadInst>(User)) {
      findCallsAtConstantOffset(DevirtCalls, nullptr, User, Offset, CI, DT);
    } else if (auto GEP = dyn_cast<GetElementPtrInst>(User)) {
      // Take into account the GEP offset.
      if (VPtr == GEP->getPointerOperand() && GEP->hasAllConstantIndices()) {
        SmallVector<Value *, 8> Indices(GEP->op_begin() + 1, GEP->op_end());
        int64_t GEPOffset = M->getDataLayout().getIndexedOffsetInType(
            GEP->getSourceElementType(), Indices);
        findLoadCallsAtConstantOffset(M, DevirtCalls, User, Offset + GEPOffset,
                                      CI, DT);
      }
    }
  }
}
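
// Worked example (hypothetical, for illustration): for a use such as
//
//   %slot = getelementptr i8, i8* %vtable, i64 8
//
// getIndexedOffsetInType(i8, {i64 8}) yields GEPOffset == 8, so the
// recursive search continues at Offset + 8, the byte offset of the vtable
// slot the eventual load reads from.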

void llvm::findDevirtualizableCallsForTypeTest(
    SmallVectorImpl<DevirtCallSite> &DevirtCalls,
    SmallVectorImpl<CallInst *> &Assumes, const CallInst *CI,
    DominatorTree &DT) {
  assert(CI->getCalledFunction()->getIntrinsicID() == Intrinsic::type_test);

  const Module *M = CI->getParent()->getParent()->getParent();

  // Find llvm.assume intrinsics for this llvm.type.test call.
  for (const Use &CIU : CI->uses()) {
    if (auto *AssumeCI = dyn_cast<CallInst>(CIU.getUser())) {
      Function *F = AssumeCI->getCalledFunction();
      if (F && F->getIntrinsicID() == Intrinsic::assume)
        Assumes.push_back(AssumeCI);
    }
  }

  // If we found any, search for virtual calls based on %p and add them to
  // DevirtCalls.
  if (!Assumes.empty())
    findLoadCallsAtConstantOffset(
        M, DevirtCalls, CI->getArgOperand(0)->stripPointerCasts(), 0, CI, DT);
}
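
// Illustrative usage sketch (not part of the original file): TypeTestCall
// and DT stand for a caller-provided llvm.type.test call site and the
// dominator tree of its enclosing function.
//
//   SmallVector<DevirtCallSite, 1> DevirtCalls;
//   SmallVector<CallInst *, 1> Assumes;
//   findDevirtualizableCallsForTypeTest(DevirtCalls, Assumes, TypeTestCall,
//                                       DT);
//   // Each entry's CS is a virtual call through the tested vtable pointer,
//   // loaded at constant byte offset Offset.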

void llvm::findDevirtualizableCallsForTypeCheckedLoad(
    SmallVectorImpl<DevirtCallSite> &DevirtCalls,
    SmallVectorImpl<Instruction *> &LoadedPtrs,
    SmallVectorImpl<Instruction *> &Preds, bool &HasNonCallUses,
    const CallInst *CI, DominatorTree &DT) {
  assert(CI->getCalledFunction()->getIntrinsicID() ==
         Intrinsic::type_checked_load);

  auto *Offset = dyn_cast<ConstantInt>(CI->getArgOperand(1));
  if (!Offset) {
    HasNonCallUses = true;
    return;
  }

  for (const Use &U : CI->uses()) {
    auto CIU = U.getUser();
    if (auto EVI = dyn_cast<ExtractValueInst>(CIU)) {
      if (EVI->getNumIndices() == 1 && EVI->getIndices()[0] == 0) {
        LoadedPtrs.push_back(EVI);
        continue;
      }
      if (EVI->getNumIndices() == 1 && EVI->getIndices()[0] == 1) {
        Preds.push_back(EVI);
        continue;
      }
    }
    HasNonCallUses = true;
  }

  for (Value *LoadedPtr : LoadedPtrs)
    findCallsAtConstantOffset(DevirtCalls, &HasNonCallUses, LoadedPtr,
                              Offset->getZExtValue(), CI, DT);
}
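
// For reference (a sketch with hypothetical names, not part of the original
// file): llvm.type.checked.load returns a {pointer, predicate} pair, and the
// two extractvalue users collected above correspond to its two fields:
//
//   %pair = call {i8*, i1} @llvm.type.checked.load(i8* %vtable, i32 8,
//                                                  metadata !"_ZTS1A")
//   %fptr = extractvalue {i8*, i1} %pair, 0   ; collected in LoadedPtrs
//   %ok   = extractvalue {i8*, i1} %pair, 1   ; collected in Preds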