Coverage Report

Created: 2018-06-24 14:39

/Users/buildslave/jenkins/workspace/clang-stage2-coverage-R/llvm/tools/polly/lib/Support/VirtualInstruction.cpp

//===------ VirtualInstruction.cpp ------------------------------*- C++ -*-===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// Tools for determining which instructions are within a statement and the
// nature of their operands.
//
//===----------------------------------------------------------------------===//

#include "polly/Support/VirtualInstruction.h"
#include "polly/Support/SCEVValidator.h"

using namespace polly;
using namespace llvm;

VirtualUse VirtualUse::create(Scop *S, const Use &U, LoopInfo *LI,
                              bool Virtual) {
  auto *UserBB = getUseBlock(U);
  Loop *UserScope = LI->getLoopFor(UserBB);
  Instruction *UI = dyn_cast<Instruction>(U.getUser());
  ScopStmt *UserStmt = S->getStmtFor(UI);

  // Uses by PHI nodes are always reading values written by other statements,
  // except when the PHI is inside a region statement.
  if (PHINode *PHI = dyn_cast<PHINode>(UI)) {
    // Handle PHI in exit block.
    if (S->getRegion().getExit() == PHI->getParent())
      return VirtualUse(UserStmt, U.get(), Inter, nullptr, nullptr);

    if (UserStmt->getEntryBlock() != PHI->getParent())
      return VirtualUse(UserStmt, U.get(), Intra, nullptr, nullptr);

    // The MemoryAccess is expected to be set if @p Virtual is true.
    MemoryAccess *IncomingMA = nullptr;
    if (Virtual) {
      if (const ScopArrayInfo *SAI =
              S->getScopArrayInfoOrNull(PHI, MemoryKind::PHI)) {
        IncomingMA = S->getPHIRead(SAI);
        assert(IncomingMA->getStatement() == UserStmt);
      }
    }

    return VirtualUse(UserStmt, U.get(), Inter, nullptr, IncomingMA);
  }

  return create(S, UserStmt, UserScope, U.get(), Virtual);
}
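
// A minimal sketch of walking the def-use edges of one defined value and
// classifying each use with the overload above; "S" and "LI" are assumed to
// come from the surrounding analysis, and every user of "Def" is assumed to
// be an instruction inside the SCoP. A sketch for illustration only.
static void classifyAllUsesSketch(Scop *S, LoopInfo *LI, Instruction *Def) {
  for (Use &U : Def->uses()) {
    // Classify the use; Virtual=true also looks up the MemoryAccess, if any.
    VirtualUse VUse = VirtualUse::create(S, U, LI, /*Virtual=*/true);
    VUse.print(errs(), /*Reproducible=*/false);
    errs() << '\n';
  }
}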

VirtualUse VirtualUse::create(Scop *S, ScopStmt *UserStmt, Loop *UserScope,
                              Value *Val, bool Virtual) {
  assert(!isa<StoreInst>(Val) && "a StoreInst cannot be used");

  if (isa<BasicBlock>(Val))
    return VirtualUse(UserStmt, Val, Block, nullptr, nullptr);

  if (isa<llvm::Constant>(Val) || isa<MetadataAsValue>(Val))
    return VirtualUse(UserStmt, Val, Constant, nullptr, nullptr);

  // Is the value synthesizable? If the user has been pruned
  // (UserStmt == nullptr), it is either not used anywhere or is synthesizable.
  // We assume it is synthesizable, which in practice should have the same
  // effect.
  auto *SE = S->getSE();
  if (SE->isSCEVable(Val->getType())) {
    auto *ScevExpr = SE->getSCEVAtScope(Val, UserScope);
    if (!UserStmt || canSynthesize(Val, *UserStmt->getParent(), SE, UserScope))
      return VirtualUse(UserStmt, Val, Synthesizable, ScevExpr, nullptr);
  }

  // FIXME: Inconsistency between lookupInvariantEquivClass and
  // getRequiredInvariantLoads. Querying one of them should be enough.
  auto &RIL = S->getRequiredInvariantLoads();
  if (S->lookupInvariantEquivClass(Val) || RIL.count(dyn_cast<LoadInst>(Val)))
    return VirtualUse(UserStmt, Val, Hoisted, nullptr, nullptr);

  // ReadOnly uses may have MemoryAccesses that we want to associate with the
  // use. This is why we look for a MemoryAccess here already.
  MemoryAccess *InputMA = nullptr;
  if (UserStmt && Virtual)
    InputMA = UserStmt->lookupValueReadOf(Val);

  // Uses are read-only if they have been defined before the SCoP, i.e., they
  // cannot be written to inside the SCoP. Arguments are defined before any
  // instructions, hence also before the SCoP. If the user has been pruned
  // (UserStmt == nullptr) and is not SCEVable, assume it is read-only as it is
  // neither an intra- nor an inter-use.
  if (!UserStmt || isa<Argument>(Val))
    return VirtualUse(UserStmt, Val, ReadOnly, nullptr, InputMA);

  auto Inst = cast<Instruction>(Val);
  if (!S->contains(Inst))
    return VirtualUse(UserStmt, Val, ReadOnly, nullptr, InputMA);

  // A use is inter-statement if either it is defined in another statement, or
  // there is a MemoryAccess that reads its value after it has been written by
  // another statement.
  if (InputMA || (!Virtual && UserStmt != S->getStmtFor(Inst)))
    return VirtualUse(UserStmt, Val, Inter, nullptr, InputMA);

  return VirtualUse(UserStmt, Val, Intra, nullptr, nullptr);
}
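
// A minimal sketch of classifying the operands of a single statement
// instruction with the overload above and tallying the resulting use kinds;
// "S" is assumed to be the surrounding SCoP and "Stmt" the statement that
// contains "Inst". A sketch for illustration only.
static void countOperandKindsSketch(Scop *S, ScopStmt *Stmt,
                                    Instruction *Inst) {
  Loop *Scope = Stmt->getSurroundingLoop();
  int NumIntra = 0, NumInter = 0, NumOther = 0;
  for (Value *Op : Inst->operand_values()) {
    // Virtual=false: classify without looking up MemoryAccesses.
    VirtualUse VUse = VirtualUse::create(S, Stmt, Scope, Op, /*Virtual=*/false);
    switch (VUse.getKind()) {
    case VirtualUse::Intra:
      ++NumIntra;
      break;
    case VirtualUse::Inter:
      ++NumInter;
      break;
    default:
      ++NumOther;
      break;
    }
  }
  errs() << NumIntra << " intra, " << NumInter << " inter, " << NumOther
         << " other operands\n";
}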

void VirtualUse::print(raw_ostream &OS, bool Reproducible) const {
  OS << "User: [" << User->getBaseName() << "] ";
  switch (Kind) {
  case VirtualUse::Constant:
    OS << "Constant Op:";
    break;
  case VirtualUse::Block:
    OS << "BasicBlock Op:";
    break;
  case VirtualUse::Synthesizable:
    OS << "Synthesizable Op:";
    break;
  case VirtualUse::Hoisted:
    OS << "Hoisted load Op:";
    break;
  case VirtualUse::ReadOnly:
    OS << "Read-Only Op:";
    break;
  case VirtualUse::Intra:
    OS << "Intra Op:";
    break;
  case VirtualUse::Inter:
    OS << "Inter Op:";
    break;
  }

  if (Val) {
    OS << ' ';
    if (Reproducible)
      OS << '"' << Val->getName() << '"';
    else
      Val->print(OS, true);
  }
  if (ScevExpr) {
    OS << ' ';
    ScevExpr->print(OS);
  }
  if (InputMA && !Reproducible)
    OS << ' ' << InputMA;
}

#if !defined(NDEBUG) || defined(LLVM_ENABLE_DUMP)
LLVM_DUMP_METHOD void VirtualUse::dump() const {
  print(errs(), false);
  errs() << '\n';
}
#endif
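
// A small sketch of capturing the reproducible form of a use as a string,
// for example to compare it in a test; assumes raw_string_ostream from
// llvm/Support/raw_ostream.h and <string> are available. Illustrative only.
static std::string toReproducibleStringSketch(const VirtualUse &VUse) {
  std::string Buffer;
  raw_string_ostream Stream(Buffer);
  // Reproducible=true prints names only, avoiding pointer values that change
  // between runs.
  VUse.print(Stream, /*Reproducible=*/true);
  return Stream.str();
}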

void VirtualInstruction::print(raw_ostream &OS, bool Reproducible) const {
  if (!Stmt || !Inst) {
    OS << "[null VirtualInstruction]";
    return;
  }

  OS << "[" << Stmt->getBaseName() << "]";
  Inst->print(OS, !Reproducible);
}

#if !defined(NDEBUG) || defined(LLVM_ENABLE_DUMP)
LLVM_DUMP_METHOD void VirtualInstruction::dump() const {
  print(errs(), false);
  errs() << '\n';
}
#endif

/// Return true if @p Inst cannot be removed, even if it is nowhere referenced.
static bool isRoot(const Instruction *Inst) {
  // The store is handled by its MemoryAccess. The load must be reached from
  // the roots in order to be marked as used.
  if (isa<LoadInst>(Inst) || isa<StoreInst>(Inst))
    return false;

  // Terminator instructions (in region statements) are required for control
  // flow.
  if (isa<TerminatorInst>(Inst))
    return true;

  // Writes to memory must be honored.
  if (Inst->mayWriteToMemory())
    return true;

  return false;
}

/// Return true for MemoryAccesses that cannot be removed because they
/// represent an llvm::Value that is used after the SCoP.
static bool isEscaping(MemoryAccess *MA) {
  assert(MA->isOriginalValueKind());
  Scop *S = MA->getStatement()->getParent();
  return S->isEscaping(cast<Instruction>(MA->getAccessValue()));
}

/// Add non-removable virtual instructions in @p Stmt to @p RootInsts.
static void
addInstructionRoots(ScopStmt *Stmt,
                    SmallVectorImpl<VirtualInstruction> &RootInsts) {
  if (!Stmt->isBlockStmt()) {
    // In region statements, the entry block's terminator and all instructions
    // outside the entry block cannot be eliminated and consequently must be
    // roots.
    RootInsts.emplace_back(Stmt,
                           Stmt->getRegion()->getEntry()->getTerminator());
    for (BasicBlock *BB : Stmt->getRegion()->blocks())
      if (Stmt->getRegion()->getEntry() != BB)
        for (Instruction &Inst : *BB)
          RootInsts.emplace_back(Stmt, &Inst);
    return;
  }

  for (Instruction *Inst : Stmt->getInstructions())
    if (isRoot(Inst))
      RootInsts.emplace_back(Stmt, Inst);
}

/// Add non-removable memory accesses in @p Stmt to @p RootAccs.
///
/// @param Local If true, all writes are assumed to escape. markAndSweep
///              algorithms can use this to be applicable to a single ScopStmt
///              only, without the risk of removing definitions required by
///              other statements.
///              If false, only writes for SCoP-escaping values are roots. This
///              is global mode, where such writes must be reached through
///              their uses in order to be marked.
static void addAccessRoots(ScopStmt *Stmt,
                           SmallVectorImpl<MemoryAccess *> &RootAccs,
                           bool Local) {
  for (auto *MA : *Stmt) {
    if (!MA->isWrite())
      continue;

    // Writes to arrays are always used.
    if (MA->isLatestArrayKind())
      RootAccs.push_back(MA);

    // Values are roots if they are escaping.
    else if (MA->isLatestValueKind()) {
      if (Local || isEscaping(MA))
        RootAccs.push_back(MA);
    }

    // Exit PHIs are, by definition, escaping.
    else if (MA->isLatestExitPHIKind())
      RootAccs.push_back(MA);

    // PHI writes are only roots if we are not visiting the statement
    // containing the PHINode.
    else if (Local && MA->isLatestPHIKind())
      RootAccs.push_back(MA);
  }
}
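
// The per-kind decision above, condensed into a single predicate for
// illustration; a sketch only, the loop above remains the authoritative
// version.
static bool isAccessRootSketch(MemoryAccess *MA, bool Local) {
  if (!MA->isWrite())
    return false;
  // Array writes and exit-PHI writes are always roots.
  if (MA->isLatestArrayKind() || MA->isLatestExitPHIKind())
    return true;
  // Value writes are roots when escaping (or always, in local mode).
  if (MA->isLatestValueKind())
    return Local || isEscaping(MA);
  // Remaining PHI writes are roots only in local mode.
  return Local && MA->isLatestPHIKind();
}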

/// Determine all instruction and access roots.
static void addRoots(ScopStmt *Stmt,
                     SmallVectorImpl<VirtualInstruction> &RootInsts,
                     SmallVectorImpl<MemoryAccess *> &RootAccs, bool Local) {
  addInstructionRoots(Stmt, RootInsts);
  addAccessRoots(Stmt, RootAccs, Local);
}

/// Mark accesses and instructions as used if they are reachable from a root,
/// walking the operand trees.
///
/// @param S              The SCoP to walk.
/// @param LI             The LoopInfo Analysis.
/// @param RootInsts      List of root instructions.
/// @param RootAccs       List of root accesses.
/// @param UsedInsts[out] Receives all reachable instructions, including the
///                       roots.
/// @param UsedAccs[out]  Receives all reachable accesses, including the roots.
/// @param OnlyLocal      If non-nullptr, restricts walking to a single
///                       statement.
static void walkReachable(Scop *S, LoopInfo *LI,
                          ArrayRef<VirtualInstruction> RootInsts,
                          ArrayRef<MemoryAccess *> RootAccs,
                          DenseSet<VirtualInstruction> &UsedInsts,
                          DenseSet<MemoryAccess *> &UsedAccs,
                          ScopStmt *OnlyLocal = nullptr) {
  UsedInsts.clear();
  UsedAccs.clear();

  SmallVector<VirtualInstruction, 32> WorklistInsts;
  SmallVector<MemoryAccess *, 32> WorklistAccs;

  WorklistInsts.append(RootInsts.begin(), RootInsts.end());
  WorklistAccs.append(RootAccs.begin(), RootAccs.end());

  auto AddToWorklist = [&](VirtualUse VUse) {
    switch (VUse.getKind()) {
    case VirtualUse::Block:
    case VirtualUse::Constant:
    case VirtualUse::Synthesizable:
    case VirtualUse::Hoisted:
      break;
    case VirtualUse::ReadOnly:
      // Read-only scalars only have MemoryAccesses if ModelReadOnlyScalars is
      // enabled.
      if (!VUse.getMemoryAccess())
        break;
      LLVM_FALLTHROUGH;
    case VirtualUse::Inter:
      assert(VUse.getMemoryAccess());
      WorklistAccs.push_back(VUse.getMemoryAccess());
      break;
    case VirtualUse::Intra:
      WorklistInsts.emplace_back(VUse.getUser(),
                                 cast<Instruction>(VUse.getValue()));
      break;
    }
  };

  while (true) {
    // We have two worklists to process: only when the MemoryAccess worklist
    // is empty do we process the instruction worklist.

    while (!WorklistAccs.empty()) {
      auto *Acc = WorklistAccs.pop_back_val();

      ScopStmt *Stmt = Acc->getStatement();
      if (OnlyLocal && Stmt != OnlyLocal)
        continue;

      auto Inserted = UsedAccs.insert(Acc);
      if (!Inserted.second)
        continue;

      if (Acc->isRead()) {
        const ScopArrayInfo *SAI = Acc->getScopArrayInfo();

        if (Acc->isLatestValueKind()) {
          MemoryAccess *DefAcc = S->getValueDef(SAI);

          // Accesses to read-only values do not have a definition.
          if (DefAcc)
            WorklistAccs.push_back(S->getValueDef(SAI));
        }

        if (Acc->isLatestAnyPHIKind()) {
          auto IncomingMAs = S->getPHIIncomings(SAI);
          WorklistAccs.append(IncomingMAs.begin(), IncomingMAs.end());
        }
      }

      if (Acc->isWrite()) {
        if (Acc->isOriginalValueKind() ||
            (Acc->isOriginalArrayKind() && Acc->getAccessValue())) {
          Loop *Scope = Stmt->getSurroundingLoop();
          VirtualUse VUse =
              VirtualUse::create(S, Stmt, Scope, Acc->getAccessValue(), true);
          AddToWorklist(VUse);
        }

        if (Acc->isOriginalAnyPHIKind()) {
          for (auto Incoming : Acc->getIncoming()) {
            VirtualUse VUse = VirtualUse::create(
                S, Stmt, LI->getLoopFor(Incoming.first), Incoming.second, true);
            AddToWorklist(VUse);
          }
        }

        if (Acc->isOriginalArrayKind())
          WorklistInsts.emplace_back(Stmt, Acc->getAccessInstruction());
      }
    }

    // If both worklists are empty, stop walking.
    if (WorklistInsts.empty())
      break;

    VirtualInstruction VInst = WorklistInsts.pop_back_val();
    ScopStmt *Stmt = VInst.getStmt();
    Instruction *Inst = VInst.getInstruction();

    // Do not process statements other than the local one.
    if (OnlyLocal && Stmt != OnlyLocal)
      continue;

    auto InsertResult = UsedInsts.insert(VInst);
    if (!InsertResult.second)
      continue;

    // Add all operands to the worklists.
    PHINode *PHI = dyn_cast<PHINode>(Inst);
    if (PHI && PHI->getParent() == Stmt->getEntryBlock()) {
      if (MemoryAccess *PHIRead = Stmt->lookupPHIReadOf(PHI))
        WorklistAccs.push_back(PHIRead);
    } else {
      for (VirtualUse VUse : VInst.operands())
        AddToWorklist(VUse);
    }

    // If there is an array access, also add its MemoryAccesses to the
    // worklist.
    const MemoryAccessList *Accs = Stmt->lookupArrayAccessesFor(Inst);
    if (!Accs)
      continue;

    for (MemoryAccess *Acc : *Accs)
      WorklistAccs.push_back(Acc);
  }
}

void polly::markReachable(Scop *S, LoopInfo *LI,
                          DenseSet<VirtualInstruction> &UsedInsts,
                          DenseSet<MemoryAccess *> &UsedAccs,
                          ScopStmt *OnlyLocal) {
  SmallVector<VirtualInstruction, 32> RootInsts;
  SmallVector<MemoryAccess *, 32> RootAccs;

  if (OnlyLocal) {
    addRoots(OnlyLocal, RootInsts, RootAccs, true);
  } else {
    for (auto &Stmt : *S)
      addRoots(&Stmt, RootInsts, RootAccs, false);
  }

  walkReachable(S, LI, RootInsts, RootAccs, UsedInsts, UsedAccs, OnlyLocal);
}
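
// A minimal sketch of consuming the reachability sets computed by
// markReachable above, e.g. to check whether one instruction of a statement
// is still needed; "S" and "LI" are assumed to come from the surrounding
// pass. Illustrative only.
static bool isUsedInstructionSketch(Scop *S, LoopInfo *LI, ScopStmt *Stmt,
                                    Instruction *Inst) {
  DenseSet<VirtualInstruction> UsedInsts;
  DenseSet<MemoryAccess *> UsedAccs;
  // Walk the whole SCoP (no OnlyLocal restriction).
  markReachable(S, LI, UsedInsts, UsedAccs, /*OnlyLocal=*/nullptr);
  return UsedInsts.count(VirtualInstruction(Stmt, Inst)) > 0;
}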