Coverage Report

Created: 2017-11-23 03:11

/Users/buildslave/jenkins/workspace/clang-stage2-coverage-R/llvm/tools/polly/lib/Support/VirtualInstruction.cpp

//===------ VirtualInstruction.cpp ------------------------------*- C++ -*-===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// Tools for determining which instructions are within a statement and the
// nature of their operands.
//
//===----------------------------------------------------------------------===//

#include "polly/Support/VirtualInstruction.h"
#include "polly/Support/SCEVValidator.h"

using namespace polly;
using namespace llvm;

VirtualUse VirtualUse::create(Scop *S, const Use &U, LoopInfo *LI,
                              bool Virtual) {
  auto *UserBB = getUseBlock(U);
  Instruction *UI = dyn_cast<Instruction>(U.getUser());
  ScopStmt *UserStmt = nullptr;
  if (PHINode *PHI = dyn_cast<PHINode>(UI))
    UserStmt = S->getLastStmtFor(PHI->getIncomingBlock(U));
  else
    UserStmt = S->getStmtFor(UI);
  auto *UserScope = LI->getLoopFor(UserBB);
  return create(S, UserStmt, UserScope, U.get(), Virtual);
}
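
// Illustrative sketch (not part of the original file): classify and print
// every use of an instruction's result via the Use-based overload above. The
// helper name printUsesOf is hypothetical; only VirtualUse::create and
// VirtualUse::print from this file are assumed.
static void printUsesOf(Scop *S, LoopInfo *LI, Instruction *Inst) {
  for (Use &U : Inst->uses()) {
    // Virtual = true also associates the MemoryAccess modeling the use, if
    // one exists.
    VirtualUse VUse = VirtualUse::create(S, U, LI, /*Virtual=*/true);
    VUse.print(errs(), /*Reproducible=*/false);
    errs() << '\n';
  }
}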

VirtualUse VirtualUse::create(Scop *S, ScopStmt *UserStmt, Loop *UserScope,
                              Value *Val, bool Virtual) {
  assert(!isa<StoreInst>(Val) && "a StoreInst cannot be used");

  if (isa<BasicBlock>(Val))
    return VirtualUse(UserStmt, Val, Block, nullptr, nullptr);

  if (isa<llvm::Constant>(Val) || isa<MetadataAsValue>(Val))
    return VirtualUse(UserStmt, Val, Constant, nullptr, nullptr);

  // Is the value synthesizable? If the user has been pruned
  // (UserStmt == nullptr), it is either not used anywhere or is synthesizable.
  // We assume synthesizable which practically should have the same effect.
  auto *SE = S->getSE();
  if (SE->isSCEVable(Val->getType())) {
    auto *ScevExpr = SE->getSCEVAtScope(Val, UserScope);
    if (!UserStmt || canSynthesize(Val, *UserStmt->getParent(), SE, UserScope))
      return VirtualUse(UserStmt, Val, Synthesizable, ScevExpr, nullptr);
  }

  // FIXME: Inconsistency between lookupInvariantEquivClass and
  // getRequiredInvariantLoads. Querying one of them should be enough.
  auto &RIL = S->getRequiredInvariantLoads();
  if (S->lookupInvariantEquivClass(Val) || RIL.count(dyn_cast<LoadInst>(Val)))
    return VirtualUse(UserStmt, Val, Hoisted, nullptr, nullptr);

  // ReadOnly uses may have MemoryAccesses that we want to associate with the
  // use. This is why we look for a MemoryAccess here already.
  MemoryAccess *InputMA = nullptr;
  if (UserStmt && Virtual)
    InputMA = UserStmt->lookupValueReadOf(Val);

  // Uses are read-only if they have been defined before the SCoP, i.e., they
  // cannot be written to inside the SCoP. Arguments are defined before any
  // instructions, hence also before the SCoP. If the user has been pruned
  // (UserStmt == nullptr) and is not SCEVable, assume it is read-only as it is
  // neither an intra- nor an inter-use.
  if (!UserStmt || isa<Argument>(Val))
    return VirtualUse(UserStmt, Val, ReadOnly, nullptr, InputMA);

  auto Inst = cast<Instruction>(Val);
  if (!S->contains(Inst))
    return VirtualUse(UserStmt, Val, ReadOnly, nullptr, InputMA);

  // A use is inter-statement if either it is defined in another statement, or
  // there is a MemoryAccess that reads its value that has been written by
  // another statement.
  if (InputMA || (!Virtual && UserStmt != S->getStmtFor(Inst)))
    return VirtualUse(UserStmt, Val, Inter, nullptr, InputMA);

  return VirtualUse(UserStmt, Val, Intra, nullptr, nullptr);
}
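
// Illustrative sketch (not part of the original file): how the kind returned
// by the overload above might drive a caller's handling of an instruction's
// operands. The helper name classifyOperands is hypothetical; only
// VirtualUse::create, VirtualUse::getKind and ScopStmt::getSurroundingLoop
// from this file are assumed.
static void classifyOperands(Scop *S, ScopStmt *UserStmt, Instruction *Inst) {
  Loop *Scope = UserStmt->getSurroundingLoop();
  for (Value *Op : Inst->operand_values()) {
    VirtualUse VUse = VirtualUse::create(S, UserStmt, Scope, Op, false);
    switch (VUse.getKind()) {
    case VirtualUse::Intra:
      // Defined by another instruction in the same statement.
      break;
    case VirtualUse::Inter:
      // Defined in another statement; a scalar read access is required.
      break;
    default:
      // Block, Constant, Synthesizable, Hoisted or ReadOnly operands need no
      // statement-local definition.
      break;
    }
  }
}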

void VirtualUse::print(raw_ostream &OS, bool Reproducible) const {
  OS << "User: [" << User->getBaseName() << "] ";
  switch (Kind) {
  case VirtualUse::Constant:
    OS << "Constant Op:";
    break;
  case VirtualUse::Block:
    OS << "BasicBlock Op:";
    break;
  case VirtualUse::Synthesizable:
    OS << "Synthesizable Op:";
    break;
  case VirtualUse::Hoisted:
    OS << "Hoisted load Op:";
    break;
  case VirtualUse::ReadOnly:
    OS << "Read-Only Op:";
    break;
  case VirtualUse::Intra:
    OS << "Intra Op:";
    break;
  case VirtualUse::Inter:
    OS << "Inter Op:";
    break;
  }

  if (Val) {
    OS << ' ';
    if (Reproducible)
      OS << '"' << Val->getName() << '"';
    else
      Val->print(OS, true);
  }
  if (ScevExpr) {
    OS << ' ';
    ScevExpr->print(OS);
  }
  if (InputMA && !Reproducible)
    OS << ' ' << InputMA;
}

#if !defined(NDEBUG) || defined(LLVM_ENABLE_DUMP)
LLVM_DUMP_METHOD void VirtualUse::dump() const {
  print(errs(), false);
  errs() << '\n';
}
#endif

void VirtualInstruction::print(raw_ostream &OS, bool Reproducible) const {
  if (!Stmt || !Inst) {
    OS << "[null VirtualInstruction]";
    return;
  }

  OS << "[" << Stmt->getBaseName() << "]";
  Inst->print(OS, !Reproducible);
}

#if !defined(NDEBUG) || defined(LLVM_ENABLE_DUMP)
LLVM_DUMP_METHOD void VirtualInstruction::dump() const {
  print(errs(), false);
  errs() << '\n';
}
#endif

/// Return true if @p Inst cannot be removed, even if it is nowhere referenced.
static bool isRoot(const Instruction *Inst) {
  // The store is handled by its MemoryAccess. The load must be reached from
  // the roots in order to be marked as used.
  if (isa<LoadInst>(Inst) || isa<StoreInst>(Inst))
    return false;

  // Terminator instructions (in region statements) are required for control
  // flow.
  if (isa<TerminatorInst>(Inst))
    return true;

  // Writes to memory must be honored.
  if (Inst->mayWriteToMemory())
    return true;

  return false;
}

/// Return true for MemoryAccesses that cannot be removed because they
/// represent an llvm::Value that is used after the SCoP.
static bool isEscaping(MemoryAccess *MA) {
  assert(MA->isOriginalValueKind());
  Scop *S = MA->getStatement()->getParent();
  return S->isEscaping(cast<Instruction>(MA->getAccessValue()));
}

/// Add non-removable virtual instructions in @p Stmt to @p RootInsts.
static void
addInstructionRoots(ScopStmt *Stmt,
                    SmallVectorImpl<VirtualInstruction> &RootInsts) {
  if (!Stmt->isBlockStmt()) {
    // In region statements, the terminator instruction and all instructions
    // that are not in the entry block cannot be eliminated and consequently
    // must be roots.
    RootInsts.emplace_back(Stmt,
                           Stmt->getRegion()->getEntry()->getTerminator());
    for (BasicBlock *BB : Stmt->getRegion()->blocks())
      if (Stmt->getRegion()->getEntry() != BB)
        for (Instruction &Inst : *BB)
          RootInsts.emplace_back(Stmt, &Inst);
    return;
  }

  for (Instruction *Inst : Stmt->getInstructions())
    if (isRoot(Inst))
      RootInsts.emplace_back(Stmt, Inst);
}

/// Add non-removable memory accesses in @p Stmt to @p RootAccs.
///
/// @param Local If true, all writes are assumed to escape. markAndSweep
/// algorithms can use this to be applicable to a single ScopStmt only without
/// the risk of removing definitions required by other statements.
///              If false, only writes for SCoP-escaping values are roots. This
///              is global mode, where other writes must be reached through
///              their uses in order to be marked.
static void addAccessRoots(ScopStmt *Stmt,
                           SmallVectorImpl<MemoryAccess *> &RootAccs,
                           bool Local) {
  for (auto *MA : *Stmt) {
    if (!MA->isWrite())
      continue;

    // Writes to arrays are always used.
    if (MA->isLatestArrayKind())
      RootAccs.push_back(MA);

    // Values are roots if they are escaping.
    else if (MA->isLatestValueKind()) {
      if (Local || isEscaping(MA))
        RootAccs.push_back(MA);
    }

    // Exit phis are, by definition, escaping.
    else if (MA->isLatestExitPHIKind())
      RootAccs.push_back(MA);

    // PHI writes are only roots if we are not visiting the statement
    // containing the PHINode.
    else if (Local && MA->isLatestPHIKind())
      RootAccs.push_back(MA);
  }
}

/// Determine all instruction and access roots.
static void addRoots(ScopStmt *Stmt,
                     SmallVectorImpl<VirtualInstruction> &RootInsts,
                     SmallVectorImpl<MemoryAccess *> &RootAccs, bool Local) {
  addInstructionRoots(Stmt, RootInsts);
  addAccessRoots(Stmt, RootAccs, Local);
}

/// Mark accesses and instructions as used if they are reachable from a root,
/// walking the operand trees.
///
/// @param S              The SCoP to walk.
/// @param LI             The LoopInfo Analysis.
/// @param RootInsts      List of root instructions.
/// @param RootAccs       List of root accesses.
/// @param UsedInsts[out] Receives all reachable instructions, including the
/// roots.
/// @param UsedAccs[out]  Receives all reachable accesses, including the roots.
/// @param OnlyLocal      If non-nullptr, restricts walking to a single
/// statement.
static void walkReachable(Scop *S, LoopInfo *LI,
                          ArrayRef<VirtualInstruction> RootInsts,
                          ArrayRef<MemoryAccess *> RootAccs,
                          DenseSet<VirtualInstruction> &UsedInsts,
                          DenseSet<MemoryAccess *> &UsedAccs,
                          ScopStmt *OnlyLocal = nullptr) {
  UsedInsts.clear();
  UsedAccs.clear();

  SmallVector<VirtualInstruction, 32> WorklistInsts;
  SmallVector<MemoryAccess *, 32> WorklistAccs;

  WorklistInsts.append(RootInsts.begin(), RootInsts.end());
  WorklistAccs.append(RootAccs.begin(), RootAccs.end());

  auto AddToWorklist = [&](VirtualUse VUse) {
    switch (VUse.getKind()) {
    case VirtualUse::Block:
    case VirtualUse::Constant:
    case VirtualUse::Synthesizable:
    case VirtualUse::Hoisted:
      break;
    case VirtualUse::ReadOnly:
      // Read-only scalars only have MemoryAccesses if ModelReadOnlyScalars is
      // enabled.
      if (!VUse.getMemoryAccess())
        break;
      LLVM_FALLTHROUGH;
    case VirtualUse::Inter:
      assert(VUse.getMemoryAccess());
      WorklistAccs.push_back(VUse.getMemoryAccess());
      break;
    case VirtualUse::Intra:
      WorklistInsts.emplace_back(VUse.getUser(),
                                 cast<Instruction>(VUse.getValue()));
      break;
    }
  };

  while (true) {
    // We have two worklists to process: only when the MemoryAccess worklist is
    // empty do we process the instruction worklist.

    while (!WorklistAccs.empty()) {
      auto *Acc = WorklistAccs.pop_back_val();

      ScopStmt *Stmt = Acc->getStatement();
      if (OnlyLocal && Stmt != OnlyLocal)
        continue;

      auto Inserted = UsedAccs.insert(Acc);
      if (!Inserted.second)
        continue;

      if (Acc->isRead()) {
        const ScopArrayInfo *SAI = Acc->getScopArrayInfo();

        if (Acc->isLatestValueKind()) {
          MemoryAccess *DefAcc = S->getValueDef(SAI);

          // Accesses to read-only values do not have a definition.
          if (DefAcc)
            WorklistAccs.push_back(S->getValueDef(SAI));
        }

        if (Acc->isLatestAnyPHIKind()) {
          auto IncomingMAs = S->getPHIIncomings(SAI);
          WorklistAccs.append(IncomingMAs.begin(), IncomingMAs.end());
        }
      }

      if (Acc->isWrite()) {
        if (Acc->isOriginalValueKind() ||
            (Acc->isOriginalArrayKind() && Acc->getAccessValue())) {
          Loop *Scope = Stmt->getSurroundingLoop();
          VirtualUse VUse =
              VirtualUse::create(S, Stmt, Scope, Acc->getAccessValue(), true);
          AddToWorklist(VUse);
        }

        if (Acc->isOriginalAnyPHIKind()) {
          for (auto Incoming : Acc->getIncoming()) {
            VirtualUse VUse = VirtualUse::create(
                S, Stmt, LI->getLoopFor(Incoming.first), Incoming.second, true);
            AddToWorklist(VUse);
          }
        }

        if (Acc->isOriginalArrayKind())
          WorklistInsts.emplace_back(Stmt, Acc->getAccessInstruction());
      }
    }

    // If both worklists are empty, stop walking.
    if (WorklistInsts.empty())
      break;

    VirtualInstruction VInst = WorklistInsts.pop_back_val();
    ScopStmt *Stmt = VInst.getStmt();
    Instruction *Inst = VInst.getInstruction();

    // Do not process statements other than the local one.
    if (OnlyLocal && Stmt != OnlyLocal)
      continue;

    auto InsertResult = UsedInsts.insert(VInst);
    if (!InsertResult.second)
      continue;

    // Add all operands to the worklists.
    PHINode *PHI = dyn_cast<PHINode>(Inst);
    if (PHI && PHI->getParent() == Stmt->getEntryBlock()) {
      if (MemoryAccess *PHIRead = Stmt->lookupPHIReadOf(PHI))
        WorklistAccs.push_back(PHIRead);
    } else {
      for (VirtualUse VUse : VInst.operands())
        AddToWorklist(VUse);
    }

    // If there is an array access, also add its MemoryAccesses to the
    // worklist.
    const MemoryAccessList *Accs = Stmt->lookupArrayAccessesFor(Inst);
    if (!Accs)
      continue;

    for (MemoryAccess *Acc : *Accs)
      WorklistAccs.push_back(Acc);
  }
}

void polly::markReachable(Scop *S, LoopInfo *LI,
                          DenseSet<VirtualInstruction> &UsedInsts,
                          DenseSet<MemoryAccess *> &UsedAccs,
                          ScopStmt *OnlyLocal) {
  SmallVector<VirtualInstruction, 32> RootInsts;
  SmallVector<MemoryAccess *, 32> RootAccs;

  if (OnlyLocal) {
    addRoots(OnlyLocal, RootInsts, RootAccs, true);
  } else {
    for (auto &Stmt : *S)
      addRoots(&Stmt, RootInsts, RootAccs, false);
  }

  walkReachable(S, LI, RootInsts, RootAccs, UsedInsts, UsedAccs, OnlyLocal);
}
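
// Illustrative sketch (not part of the original file): a mark-and-sweep style
// cleanup built on markReachable. The helper name findUnusedAccesses is
// hypothetical; the mark phase is markReachable itself, and the sweep phase
// here only collects candidates instead of removing them. Passing a statement
// as OnlyLocal instead of nullptr would restrict the walk to that statement,
// treating all of its writes as roots (the Local mode of addAccessRoots).
static void findUnusedAccesses(Scop *S, LoopInfo *LI,
                               SmallVectorImpl<MemoryAccess *> &Unused) {
  DenseSet<VirtualInstruction> UsedInsts;
  DenseSet<MemoryAccess *> UsedAccs;

  // Mark phase: everything reachable from the roots of all statements.
  markReachable(S, LI, UsedInsts, UsedAccs, nullptr);

  // Sweep phase: any access that was not marked is a removal candidate.
  for (ScopStmt &Stmt : *S)
    for (MemoryAccess *MA : Stmt)
      if (!UsedAccs.count(MA))
        Unused.push_back(MA);
}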