Coverage Report

Created: 2019-07-24 05:18

/Users/buildslave/jenkins/workspace/clang-stage2-coverage-R/llvm/lib/CodeGen/SwiftErrorValueTracking.cpp
Line | Count | Source
  1|      |//===-- SwiftErrorValueTracking.cpp --------------------------------------===//
  2|      |//
  3|      |// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
  4|      |// See https://llvm.org/LICENSE.txt for license information.
  5|      |// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
  6|      |//
  7|      |//===----------------------------------------------------------------------===//
  8|      |//
  9|      |// This implements a limited mem2reg-like analysis to promote uses of function
 10|      |// arguments and allocas marked with swifterror from memory into virtual
 11|      |// registers tracked by this class.
 12|      |//
 13|      |//===----------------------------------------------------------------------===//
 14|      |
 15|      |#include "llvm/CodeGen/SwiftErrorValueTracking.h"
 16|      |#include "llvm/ADT/SmallSet.h"
 17|      |#include "llvm/CodeGen/MachineRegisterInfo.h"
 18|      |#include "llvm/CodeGen/MachineInstrBuilder.h"
 19|      |#include "llvm/CodeGen/TargetInstrInfo.h"
 20|      |#include "llvm/CodeGen/TargetLowering.h"
 21|      |#include "llvm/IR/Value.h"
 22|      |
 23|      |using namespace llvm;
 24|      |
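For orientation, here is a rough sketch of how an instruction selector might drive this tracker over a function. The wrapper function and its BlockMap parameter are illustrative assumptions, not code from this file; only the four SwiftErrorValueTracking calls mirror the API covered below.

    #include "llvm/ADT/DenseMap.h"
    #include "llvm/CodeGen/MachineFunction.h"
    #include "llvm/CodeGen/SwiftErrorValueTracking.h"
    #include "llvm/IR/DebugLoc.h"
    #include "llvm/IR/Function.h"

    using namespace llvm;

    // Hypothetical driver: collect swifterror values, seed the entry block,
    // pre-assign vregs per IR block around instruction selection, then place
    // copies/phis at control-flow joins. BlockMap stands in for the selector's
    // own IR-block-to-machine-block mapping.
    static void trackSwiftError(
        MachineFunction &MF, SwiftErrorValueTracking &SwiftError,
        const DenseMap<const BasicBlock *, MachineBasicBlock *> &BlockMap) {
      SwiftError.setFunction(MF);
      SwiftError.createEntriesInEntryBlock(DebugLoc());
      for (const BasicBlock &BB : MF.getFunction()) {
        auto MapIt = BlockMap.find(&BB);
        if (MapIt == BlockMap.end())
          continue;
        SwiftError.preassignVRegs(MapIt->second, BB.begin(), BB.end());
        // ... per-block instruction selection would run here ...
      }
      SwiftError.propagateVRegs();
    }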
 25|      |Register SwiftErrorValueTracking::getOrCreateVReg(const MachineBasicBlock *MBB,
 26|   562|                                                   const Value *Val) {
 27|   562|  auto Key = std::make_pair(MBB, Val);
 28|   562|  auto It = VRegDefMap.find(Key);
 29|   562|  // If this is the first use of this swifterror value in this basic block,
 30|   562|  // create a new virtual register.
 31|   562|  // After we have processed all basic blocks, we will satisfy this "upwards
 32|   562|  // exposed use" by inserting a copy or phi at the beginning of this block.
 33|   562|  if (It == VRegDefMap.end()) {
 34|    53|    auto &DL = MF->getDataLayout();
 35|    53|    const TargetRegisterClass *RC = TLI->getRegClassFor(TLI->getPointerTy(DL));
 36|    53|    auto VReg = MF->getRegInfo().createVirtualRegister(RC);
 37|    53|    VRegDefMap[Key] = VReg;
 38|    53|    VRegUpwardsUse[Key] = VReg;
 39|    53|    return VReg;
 40|    53|  } else
 41|   509|    return It->second;
 42|   562|}
 43|      |
 44|      |void SwiftErrorValueTracking::setCurrentVReg(const MachineBasicBlock *MBB,
 45|   558|                                             const Value *Val, Register VReg) {
 46|   558|  VRegDefMap[std::make_pair(MBB, Val)] = VReg;
 47|   558|}
 48|      |
 49|      |Register SwiftErrorValueTracking::getOrCreateVRegDefAt(
 50|   324|    const Instruction *I, const MachineBasicBlock *MBB, const Value *Val) {
 51|   324|  auto Key = PointerIntPair<const Instruction *, 1, bool>(I, true);
 52|   324|  auto It = VRegDefUses.find(Key);
 53|   324|  if (It != VRegDefUses.end())
 54|    82|    return It->second;
 55|   242|
 56|   242|  auto &DL = MF->getDataLayout();
 57|   242|  const TargetRegisterClass *RC = TLI->getRegClassFor(TLI->getPointerTy(DL));
 58|   242|  Register VReg = MF->getRegInfo().createVirtualRegister(RC);
 59|   242|  VRegDefUses[Key] = VReg;
 60|   242|  setCurrentVReg(MBB, Val, VReg);
 61|   242|  return VReg;
 62|   242|}
 63|      |
 64|      |Register SwiftErrorValueTracking::getOrCreateVRegUseAt(
 65|   409|    const Instruction *I, const MachineBasicBlock *MBB, const Value *Val) {
 66|   409|  auto Key = PointerIntPair<const Instruction *, 1, bool>(I, false);
 67|   409|  auto It = VRegDefUses.find(Key);
 68|   409|  if (It != VRegDefUses.end())
 69|   106|    return It->second;
 70|   303|
 71|   303|  Register VReg = getOrCreateVReg(MBB, Val);
 72|   303|  VRegDefUses[Key] = VReg;
 73|   303|  return VReg;
 74|   303|}
 75|      |
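Call lowering pairs the two helpers above: the swifterror address consumed by a call is read through a use vreg at that instruction, and the value the call leaves behind is recorded through a def vreg at the same instruction. A hedged sketch follows; the wrapper is hypothetical and only the two tracker calls come from this file's API.

    #include "llvm/CodeGen/MachineBasicBlock.h"
    #include "llvm/CodeGen/Register.h"
    #include "llvm/CodeGen/SwiftErrorValueTracking.h"
    #include "llvm/IR/Instructions.h"

    using namespace llvm;

    // Hypothetical lowering step for a call that carries a swifterror argument.
    static void noteSwiftErrorAtCall(SwiftErrorValueTracking &SwiftError,
                                     const CallInst *Call, MachineBasicBlock *MBB,
                                     const Value *SwiftErrorAddr) {
      // Upward-exposed use: the swifterror value flowing into the call.
      Register InVReg = SwiftError.getOrCreateVRegUseAt(Call, MBB, SwiftErrorAddr);
      // Downward-exposed def: the swifterror value the call produces.
      Register OutVReg = SwiftError.getOrCreateVRegDefAt(Call, MBB, SwiftErrorAddr);
      (void)InVReg;
      (void)OutVReg;
      // The actual call emission would copy InVReg into the swifterror register
      // before the call and copy the returned value back into OutVReg after it.
    }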
 76|      |/// Set up SwiftErrorVals by going through the function. If the function has
 77|      |/// a swifterror argument, it will be the first entry.
 78|  515k|void SwiftErrorValueTracking::setFunction(MachineFunction &mf) {
 79|  515k|  MF = &mf;
 80|  515k|  Fn = &MF->getFunction();
 81|  515k|  TLI = MF->getSubtarget().getTargetLowering();
 82|  515k|  TII = MF->getSubtarget().getInstrInfo();
 83|  515k|
 84|  515k|  if (!TLI->supportSwiftError())
 85| 95.5k|    return;
 86|  420k|
 87|  420k|  SwiftErrorVals.clear();
 88|  420k|  VRegDefMap.clear();
 89|  420k|  VRegUpwardsUse.clear();
 90|  420k|  VRegDefUses.clear();
 91|  420k|  SwiftErrorArg = nullptr;
 92|  420k|
 93|  420k|  // Check if the function has a swifterror argument.
 94|  420k|  bool HaveSeenSwiftErrorArg = false;
 95|  420k|  for (Function::const_arg_iterator AI = Fn->arg_begin(), AE = Fn->arg_end();
 96| 1.32M|       AI != AE; ++AI)
 97|  906k|    if (AI->hasSwiftErrorAttr()) {
 98|   111|      assert(!HaveSeenSwiftErrorArg &&
 99|   111|             "Must have only one swifterror parameter");
100|   111|      (void)HaveSeenSwiftErrorArg; // silence warning.
101|   111|      HaveSeenSwiftErrorArg = true;
102|   111|      SwiftErrorArg = &*AI;
103|   111|      SwiftErrorVals.push_back(&*AI);
104|   111|    }
105|  420k|
106|  420k|  for (const auto &LLVMBB : *Fn)
107| 18.0M|    for (const auto &Inst : LLVMBB) {
108| 18.0M|      if (const AllocaInst *Alloca = dyn_cast<AllocaInst>(&Inst))
109| 87.5k|        if (Alloca->isSwiftError())
110|    72|          SwiftErrorVals.push_back(Alloca);
111| 18.0M|    }
112|  420k|}
113|      |
114|  515k|bool SwiftErrorValueTracking::createEntriesInEntryBlock(DebugLoc DbgLoc) {
115|  515k|  if (!TLI->supportSwiftError())
116| 95.5k|    return false;
117|  420k|
118|  420k|  // We only need to do this when we have a swifterror parameter or a
119|  420k|  // swifterror alloca.
120|  420k|  if (SwiftErrorVals.empty())
121|  420k|    return false;
122|   157|
123|   157|  MachineBasicBlock *MBB = &*MF->begin();
124|   157|  auto &DL = MF->getDataLayout();
125|   157|  auto const *RC = TLI->getRegClassFor(TLI->getPointerTy(DL));
126|   157|  bool Inserted = false;
127|   183|  for (const auto *SwiftErrorVal : SwiftErrorVals) {
128|   183|    // We will always generate a copy from the argument. It is always used at
129|   183|    // least by the 'return' of the swifterror.
130|   183|    if (SwiftErrorArg && SwiftErrorArg == SwiftErrorVal)
131|   111|      continue;
132|    72|    Register VReg = MF->getRegInfo().createVirtualRegister(RC);
133|    72|    // Assign Undef to VReg. We construct the MI directly to make sure it works
134|    72|    // with FastISel.
135|    72|    BuildMI(*MBB, MBB->getFirstNonPHI(), DbgLoc,
136|    72|            TII->get(TargetOpcode::IMPLICIT_DEF), VReg);
137|    72|
138|    72|    setCurrentVReg(MBB, SwiftErrorVal, VReg);
139|    72|    Inserted = true;
140|    72|  }
141|   157|
142|   157|  return Inserted;
143|   157|}
144|      |
145|      |/// Propagate swifterror values through the machine function CFG.
146|  515k|void SwiftErrorValueTracking::propagateVRegs() {
147|  515k|  if (!TLI->supportSwiftError())
148| 95.5k|    return;
149|  419k|
150|  419k|  // We only need to do this when we have a swifterror parameter or a
151|  419k|  // swifterror alloca.
152|  419k|  if (SwiftErrorVals.empty())
153|  419k|    return;
154|   162|
155|   162|  // For each machine basic block in reverse post order.
156|   162|  ReversePostOrderTraversal<MachineFunction *> RPOT(MF);
157|   374|  for (MachineBasicBlock *MBB : RPOT) {
158|   374|    // For each swifterror value in the function.
159|   412|    for (const auto *SwiftErrorVal : SwiftErrorVals) {
160|   412|      auto Key = std::make_pair(MBB, SwiftErrorVal);
161|   412|      auto UUseIt = VRegUpwardsUse.find(Key);
162|   412|      auto VRegDefIt = VRegDefMap.find(Key);
163|   412|      bool UpwardsUse = UUseIt != VRegUpwardsUse.end();
164|   412|      Register UUseVReg = UpwardsUse ? UUseIt->second : Register();
165|   412|      bool DownwardDef = VRegDefIt != VRegDefMap.end();
166|   412|      assert(!(UpwardsUse && !DownwardDef) &&
167|   412|             "We can't have an upwards use but no downwards def");
168|   412|
169|   412|      // If there is no upwards exposed use and there is an entry for the
170|   412|      // swifterror in the def map for this value, we don't need to do anything:
171|   412|      // we already have a downward def for this basic block.
172|   412|      if (!UpwardsUse && DownwardDef)
173|   226|        continue;
174|   186|
175|   186|      // Otherwise we either have an upwards exposed use vreg that we need to
176|   186|      // materialize or need to forward the downward def from predecessors.
177|   186|
178|   186|      // Check whether we have a single vreg def from all predecessors.
179|   186|      // Otherwise we need a phi.
180|   186|      SmallVector<std::pair<MachineBasicBlock *, Register>, 4> VRegs;
181|   186|      SmallSet<const MachineBasicBlock *, 8> Visited;
182|   259|      for (auto *Pred : MBB->predecessors()) {
183|   259|        if (!Visited.insert(Pred).second)
184|     0|          continue;
185|   259|        VRegs.push_back(std::make_pair(
186|   259|            Pred, getOrCreateVReg(Pred, SwiftErrorVal)));
187|   259|        if (Pred != MBB)
188|   257|          continue;
189|     2|        // We have a self-edge.
190|     2|        // If there was no upwards use in this basic block there is now one: the
191|     2|        // phi needs to use itself.
192|     2|        if (!UpwardsUse) {
193|     0|          UpwardsUse = true;
194|     0|          UUseIt = VRegUpwardsUse.find(Key);
195|     0|          assert(UUseIt != VRegUpwardsUse.end());
196|     0|          UUseVReg = UUseIt->second;
197|     0|        }
198|     2|      }
199|   186|
200|   186|      // We need a phi node if we have more than one predecessor with different
201|   186|      // downward defs.
202|   186|      bool needPHI =
203|   186|          VRegs.size() >= 1 &&
204|   186|          std::find_if(
205|   186|              VRegs.begin(), VRegs.end(),
206|   186|              [&](const std::pair<const MachineBasicBlock *, Register> &V)
207|   259|                  -> bool { return V.second != VRegs[0].second; }) !=
208|   186|              VRegs.end();
209|   186|
210|   186|      // If there is no upwards exposed use and we don't need a phi, just
211|   186|      // forward the swifterror vreg from the predecessor(s).
212|   186|      if (!UpwardsUse && !needPHI) {
213|   123|        assert(!VRegs.empty() &&
214|   123|               "No predecessors? The entry block should bail out earlier");
215|   123|        // Just forward the swifterror vreg from the predecessor(s).
216|   123|        setCurrentVReg(MBB, SwiftErrorVal, VRegs[0].second);
217|   123|        continue;
218|   123|      }
219|    63|
220|    63|      auto DLoc = isa<Instruction>(SwiftErrorVal)
221|    63|                      ? cast<Instruction>(SwiftErrorVal)->getDebugLoc()
222|    63|                      : DebugLoc();
223|    63|      const auto *TII = MF->getSubtarget().getInstrInfo();
224|    63|
225|    63|      // If we don't need a phi, create a copy to the upward exposed vreg.
226|    63|      if (!needPHI) {
227|    41|        assert(UpwardsUse);
228|    41|        assert(!VRegs.empty() &&
229|    41|               "No predecessors?  Is the Calling Convention correct?");
230|    41|        Register DestReg = UUseVReg;
231|    41|        BuildMI(*MBB, MBB->getFirstNonPHI(), DLoc, TII->get(TargetOpcode::COPY),
232|    41|                DestReg)
233|    41|            .addReg(VRegs[0].second);
234|    41|        continue;
235|    41|      }
236|    22|
237|    22|      // We need a phi: if there is an upwards exposed use we already have a
238|    22|      // destination virtual register number; otherwise we generate a new one.
239|    22|      auto &DL = MF->getDataLayout();
240|    22|      auto const *RC = TLI->getRegClassFor(TLI->getPointerTy(DL));
241|    22|      Register PHIVReg =
242|    22|          UpwardsUse ? UUseVReg : MF->getRegInfo().createVirtualRegister(RC);
243|    22|      MachineInstrBuilder PHI =
244|    22|          BuildMI(*MBB, MBB->getFirstNonPHI(), DLoc,
245|    22|                  TII->get(TargetOpcode::PHI), PHIVReg);
246|    44|      for (auto BBRegPair : VRegs) {
247|    44|        PHI.addReg(BBRegPair.second).addMBB(BBRegPair.first);
248|    44|      }
249|    22|
250|    22|      // We did not have a definition in this block before: store the phi's vreg
251|    22|      // as this block's downward exposed def.
252|    22|      if (!UpwardsUse)
253|    10|        setCurrentVReg(MBB, SwiftErrorVal, PHIVReg);
254|    22|    }
255|   374|  }
256|   162|}
257|      |
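The join handling above reduces to a single predicate: a PHI is required only when predecessors disagree on the incoming vreg; otherwise the one incoming vreg is forwarded, or a single COPY satisfies an upward-exposed use. A standalone restatement of that test is sketched below; the helper name is illustrative and not part of the LLVM API.

    #include "llvm/ADT/ArrayRef.h"
    #include "llvm/ADT/STLExtras.h"
    #include "llvm/CodeGen/Register.h"
    #include <utility>

    namespace llvm { class MachineBasicBlock; }
    using namespace llvm;

    // Mirrors the needPHI computation in propagateVRegs: at least one
    // predecessor must supply a vreg different from the first entry's.
    static bool needsSwiftErrorPHI(
        ArrayRef<std::pair<MachineBasicBlock *, Register>> VRegs) {
      return !VRegs.empty() &&
             llvm::any_of(VRegs,
                          [&](const std::pair<MachineBasicBlock *, Register> &V) {
                            return V.second != VRegs[0].second;
                          });
    }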
258|      |void SwiftErrorValueTracking::preassignVRegs(
259|      |    MachineBasicBlock *MBB, BasicBlock::const_iterator Begin,
260| 15.8k|    BasicBlock::const_iterator End) {
261| 15.8k|  if (!TLI->supportSwiftError() || SwiftErrorVals.empty())
262| 15.7k|    return;
263|   121|
264|   121|  // Iterate over instructions and assign vregs to swifterror defs and uses.
265|   582|  for (auto It = Begin; It != End; ++It) {
266|   461|    ImmutableCallSite CS(&*It);
267|   461|    if (CS) {
268|    80|      // A call-site with a swifterror argument is both a use and a def.
269|    80|      const Value *SwiftErrorAddr = nullptr;
270|   195|      for (auto &Arg : CS.args()) {
271|   195|        if (!Arg->isSwiftError())
272|   152|          continue;
273|    43|        // Use of swifterror.
274|    43|        assert(!SwiftErrorAddr && "Cannot have multiple swifterror arguments");
275|    43|        SwiftErrorAddr = &*Arg;
276|    43|        assert(SwiftErrorAddr->isSwiftError() &&
277|    43|               "Must have a swifterror value argument");
278|    43|        getOrCreateVRegUseAt(&*It, MBB, SwiftErrorAddr);
279|    43|      }
280|    80|      if (!SwiftErrorAddr)
281|    37|        continue;
282|    43|
283|    43|      // Def of swifterror.
284|    43|      getOrCreateVRegDefAt(&*It, MBB, SwiftErrorAddr);
285|    43|
286|    43|      // A load is a use.
287|   381|    } else if (const LoadInst *LI = dyn_cast<const LoadInst>(&*It)) {
288|    41|      const Value *V = LI->getOperand(0);
289|    41|      if (!V->isSwiftError())
290|    22|        continue;
291|    19|
292|    19|      getOrCreateVRegUseAt(LI, MBB, V);
293|    19|
294|    19|      // A store is a def.
295|   340|    } else if (const StoreInst *SI = dyn_cast<const StoreInst>(&*It)) {
296|    82|      const Value *SwiftErrorAddr = SI->getOperand(1);
297|    82|      if (!SwiftErrorAddr->isSwiftError())
298|    43|        continue;
299|    39|
300|    39|      // Def of swifterror.
301|    39|      getOrCreateVRegDefAt(&*It, MBB, SwiftErrorAddr);
302|    39|
303|    39|      // A return in a swifterror-returning function is a use.
304|   258|    } else if (const ReturnInst *R = dyn_cast<const ReturnInst>(&*It)) {
305|    62|      const Function *F = R->getParent()->getParent();
306|    62|      if (!F->getAttributes().hasAttrSomewhere(Attribute::SwiftError))
307|    18|        continue;
308|    44|
309|    44|      getOrCreateVRegUseAt(R, MBB, SwiftErrorArg);
310|    44|    }
311|   461|  }
312|   121|}