Coverage Report

Created: 2018-10-23 09:19

/Users/buildslave/jenkins/workspace/clang-stage2-coverage-R/llvm/tools/polly/lib/Support/ScopHelper.cpp
//===- ScopHelper.cpp - Some Helper Functions for Scop.  ------------------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// Small functions that help with Scop and LLVM-IR.
//
//===----------------------------------------------------------------------===//

#include "polly/Support/ScopHelper.h"
#include "polly/Options.h"
#include "polly/ScopInfo.h"
#include "polly/Support/SCEVValidator.h"
#include "llvm/Analysis/LoopInfo.h"
#include "llvm/Analysis/RegionInfo.h"
#include "llvm/Analysis/RegionInfoImpl.h"
#include "llvm/Analysis/ScalarEvolution.h"
#include "llvm/Analysis/ScalarEvolutionExpander.h"
#include "llvm/Analysis/ScalarEvolutionExpressions.h"
#include "llvm/IR/CFG.h"
#include "llvm/IR/IntrinsicInst.h"
#include "llvm/Support/Debug.h"
#include "llvm/Transforms/Utils/BasicBlockUtils.h"

using namespace llvm;
using namespace polly;

#define DEBUG_TYPE "polly-scop-helper"

static cl::opt<bool> PollyAllowErrorBlocks(
    "polly-allow-error-blocks",
    cl::desc("Allow to speculate on the execution of 'error blocks'."),
    cl::Hidden, cl::init(true), cl::ZeroOrMore, cl::cat(PollyCategory));

static cl::list<std::string> DebugFunctions(
    "polly-debug-func",
    cl::desc("Allow calls to the specified functions in SCoPs even if their "
             "side-effects are unknown. This can be used to do debug output in "
             "Polly-transformed code."),
    cl::Hidden, cl::ZeroOrMore, cl::CommaSeparated, cl::cat(PollyCategory));

// Ensures that there is just one predecessor to the entry node from outside the
// region.
// The identity of the region entry node is preserved.
static void simplifyRegionEntry(Region *R, DominatorTree *DT, LoopInfo *LI,
                                RegionInfo *RI) {
  BasicBlock *EnteringBB = R->getEnteringBlock();
  BasicBlock *Entry = R->getEntry();

  // Before (one of):
  //
  //                       \    /            //
  //                      EnteringBB         //
  //                        |    \------>    //
  //   \   /                |                //
  //   Entry <--\         Entry <--\         //
  //   /   \    /         /   \    /         //
  //        ....               ....          //

  // Create a single entry edge if the region has multiple entry edges.
  if (!EnteringBB) {
    SmallVector<BasicBlock *, 4> Preds;
    for (BasicBlock *P : predecessors(Entry))
      if (!R->contains(P))
        Preds.push_back(P);

    BasicBlock *NewEntering =
        SplitBlockPredecessors(Entry, Preds, ".region_entering", DT, LI);

    if (RI) {
      // The exit block of preceding regions must be changed to NewEntering.
      for (BasicBlock *ExitPred : predecessors(NewEntering)) {
        Region *RegionOfPred = RI->getRegionFor(ExitPred);
        if (RegionOfPred->getExit() != Entry)
          continue;

        while (!RegionOfPred->isTopLevelRegion() &&
               RegionOfPred->getExit() == Entry) {
          RegionOfPred->replaceExit(NewEntering);
          RegionOfPred = RegionOfPred->getParent();
        }
      }

      // Make all ancestors use EnteringBB as entry; there might be edges to it.
      Region *AncestorR = R->getParent();
      RI->setRegionFor(NewEntering, AncestorR);
      while (!AncestorR->isTopLevelRegion() && AncestorR->getEntry() == Entry) {
        AncestorR->replaceEntry(NewEntering);
        AncestorR = AncestorR->getParent();
      }
    }

    EnteringBB = NewEntering;
  }
  assert(R->getEnteringBlock() == EnteringBB);

  // After:
  //
  //    \    /       //
  //  EnteringBB     //
  //      |          //
  //      |          //
  //    Entry <--\   //
  //    /   \    /   //
  //         ....    //
}

// Ensure that the region has a single block that branches to the exit node.
static void simplifyRegionExit(Region *R, DominatorTree *DT, LoopInfo *LI,
                               RegionInfo *RI) {
  BasicBlock *ExitBB = R->getExit();
  BasicBlock *ExitingBB = R->getExitingBlock();

  // Before:
  //
  //   (Region)   ______/  //
  //      \  |   /         //
  //       ExitBB          //
  //       /    \          //

  if (!ExitingBB) {
    SmallVector<BasicBlock *, 4> Preds;
    for (BasicBlock *P : predecessors(ExitBB))
      if (R->contains(P))
        Preds.push_back(P);

    //  Preds[0] Preds[1]      otherBB //
    //         \  |  ________/         //
    //          \ | /                  //
    //           BB                    //
    ExitingBB =
        SplitBlockPredecessors(ExitBB, Preds, ".region_exiting", DT, LI);
    // Preds[0] Preds[1]      otherBB  //
    //        \  /           /         //
    // BB.region_exiting    /          //
    //                  \  /           //
    //                   BB            //

    if (RI)
      RI->setRegionFor(ExitingBB, R);

    // Change the exit of nested regions, but not the exit of the region itself.
    R->replaceExitRecursive(ExitingBB);
    R->replaceExit(ExitBB);
  }
  assert(ExitingBB == R->getExitingBlock());

  // After:
  //
  //     \   /                //
  //    ExitingBB     _____/  //
  //          \      /        //
  //           ExitBB         //
  //           /    \         //
}

void polly::simplifyRegion(Region *R, DominatorTree *DT, LoopInfo *LI,
                           RegionInfo *RI) {
  assert(R && !R->isTopLevelRegion());
  assert(!RI || RI == R->getRegionInfo());
  assert((!RI || DT) &&
         "RegionInfo requires DominatorTree to be updated as well");

  simplifyRegionEntry(R, DT, LI, RI);
  simplifyRegionExit(R, DT, LI, RI);
  assert(R->isSimple());
}

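// A minimal usage sketch of simplifyRegion, assuming the caller already has
// the analyses at hand; the wrapper name prepareRegionForCodegen is an
// illustrative assumption, not part of this file.
static void prepareRegionForCodegen(Region *R, DominatorTree *DT, LoopInfo *LI,
                                    RegionInfo *RI) {
  // Give the region a single entering edge and a single exiting block so the
  // code generator can insert new blocks around it without further CFG
  // surgery.
  polly::simplifyRegion(R, DT, LI, RI);
  assert(R->isSimple() && "simplifyRegion must leave a simple region");
}
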
// Split the block into two successive blocks.
//
// Like llvm::SplitBlock, but also preserves RegionInfo.
static BasicBlock *splitBlock(BasicBlock *Old, Instruction *SplitPt,
                              DominatorTree *DT, llvm::LoopInfo *LI,
                              RegionInfo *RI) {
  assert(Old && SplitPt);

  // Before:
  //
  //  \   /  //
  //   Old   //
  //  /   \  //

  BasicBlock *NewBlock = llvm::SplitBlock(Old, SplitPt, DT, LI);

  if (RI) {
    Region *R = RI->getRegionFor(Old);
    RI->setRegionFor(NewBlock, R);
  }

  // After:
  //
  //   \   /    //
  //    Old     //
  //     |      //
  //  NewBlock  //
  //   /   \    //

  return NewBlock;
}

void polly::splitEntryBlockForAlloca(BasicBlock *EntryBlock, DominatorTree *DT,
                                     LoopInfo *LI, RegionInfo *RI) {
  // Find the first non-alloca instruction. Every basic block has a non-alloca
  // instruction, as every well-formed basic block has a terminator.
  BasicBlock::iterator I = EntryBlock->begin();
  while (isa<AllocaInst>(I))
    ++I;

  // splitBlock updates DT, LI and RI.
  splitBlock(EntryBlock, &*I, DT, LI, RI);
}

void polly::splitEntryBlockForAlloca(BasicBlock *EntryBlock, Pass *P) {
  auto *DTWP = P->getAnalysisIfAvailable<DominatorTreeWrapperPass>();
  auto *DT = DTWP ? &DTWP->getDomTree() : nullptr;
  auto *LIWP = P->getAnalysisIfAvailable<LoopInfoWrapperPass>();
  auto *LI = LIWP ? &LIWP->getLoopInfo() : nullptr;
  RegionInfoPass *RIP = P->getAnalysisIfAvailable<RegionInfoPass>();
  RegionInfo *RI = RIP ? &RIP->getRegionInfo() : nullptr;

  // splitBlock updates DT, LI and RI.
  polly::splitEntryBlockForAlloca(EntryBlock, DT, LI, RI);
}

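// Sketch of a typical call site: the function's entry block is split after
// its leading allocas so newly generated code cannot end up in front of them.
// The surrounding function and the way the analyses reach it are assumptions
// for illustration only.
static void prepareEntryBlock(Function &F, DominatorTree *DT, LoopInfo *LI,
                              RegionInfo *RI) {
  // Allocas stay in the (shortened) entry block; everything else moves into
  // the new successor block created by the split.
  polly::splitEntryBlockForAlloca(&F.getEntryBlock(), DT, LI, RI);
}
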
/// The SCEVExpander will __not__ generate any code for an existing SDiv/SRem
/// instruction but just use it, if it is referenced as a SCEVUnknown. However,
/// we want to generate new code if the instruction is in the analyzed region
/// and we generate code outside/in front of that region. Hence, we generate
/// the code for the SDiv/SRem operands in front of the analyzed region and
/// then create a new SDiv/SRem operation there too.
struct ScopExpander : SCEVVisitor<ScopExpander, const SCEV *> {
  friend struct SCEVVisitor<ScopExpander, const SCEV *>;

  explicit ScopExpander(const Region &R, ScalarEvolution &SE,
                        const DataLayout &DL, const char *Name, ValueMapT *VMap,
                        BasicBlock *RTCBB)
      : Expander(SCEVExpander(SE, DL, Name)), SE(SE), Name(Name), R(R),
        VMap(VMap), RTCBB(RTCBB) {}

  Value *expandCodeFor(const SCEV *E, Type *Ty, Instruction *I) {
    // If we generate code in the region we will immediately fall back to the
    // SCEVExpander, otherwise we will stop at all unknowns in the SCEV and if
    // needed replace them by copies computed in the entering block.
    if (!R.contains(I))
      E = visit(E);
    return Expander.expandCodeFor(E, Ty, I);
  }

  const SCEV *visit(const SCEV *E) {
    // Cache the expansion results for intermediate SCEV expressions. A SCEV
    // expression can refer to an operand multiple times (e.g. "x*x"), so
    // a naive visitor takes exponential time.
    if (SCEVCache.count(E))
      return SCEVCache[E];
    const SCEV *Result = SCEVVisitor::visit(E);
    SCEVCache[E] = Result;
    return Result;
  }

private:
  SCEVExpander Expander;
  ScalarEvolution &SE;
  const char *Name;
  const Region &R;
  ValueMapT *VMap;
  BasicBlock *RTCBB;
  DenseMap<const SCEV *, const SCEV *> SCEVCache;

  const SCEV *visitGenericInst(const SCEVUnknown *E, Instruction *Inst,
                               Instruction *IP) {
    if (!Inst || !R.contains(Inst))
      return E;

    assert(!Inst->mayThrow() && !Inst->mayReadOrWriteMemory() &&
           !isa<PHINode>(Inst));

    auto *InstClone = Inst->clone();
    for (auto &Op : Inst->operands()) {
      assert(SE.isSCEVable(Op->getType()));
      auto *OpSCEV = SE.getSCEV(Op);
      auto *OpClone = expandCodeFor(OpSCEV, Op->getType(), IP);
      InstClone->replaceUsesOfWith(Op, OpClone);
    }

    InstClone->setName(Name + Inst->getName());
    InstClone->insertBefore(IP);
    return SE.getSCEV(InstClone);
  }

  const SCEV *visitUnknown(const SCEVUnknown *E) {

    // If a value mapping was given, check whether the underlying value is
    // remapped.
    Value *NewVal = VMap ? VMap->lookup(E->getValue()) : nullptr;
    if (NewVal) {
      auto *NewE = SE.getSCEV(NewVal);

      // While the mapped value might be different, the SCEV representation
      // might not be. To this end we check before we go into recursion here.
      if (E != NewE)
        return visit(NewE);
    }

    Instruction *Inst = dyn_cast<Instruction>(E->getValue());
    Instruction *IP;
    if (Inst && !R.contains(Inst))
      IP = Inst;
    else if (Inst && RTCBB->getParent() == Inst->getFunction())
      IP = RTCBB->getTerminator();
    else
      IP = RTCBB->getParent()->getEntryBlock().getTerminator();

    if (!Inst || (Inst->getOpcode() != Instruction::SRem &&
                  Inst->getOpcode() != Instruction::SDiv))
      return visitGenericInst(E, Inst, IP);

    const SCEV *LHSScev = SE.getSCEV(Inst->getOperand(0));
    const SCEV *RHSScev = SE.getSCEV(Inst->getOperand(1));

    if (!SE.isKnownNonZero(RHSScev))
      RHSScev = SE.getUMaxExpr(RHSScev, SE.getConstant(E->getType(), 1));

    Value *LHS = expandCodeFor(LHSScev, E->getType(), IP);
    Value *RHS = expandCodeFor(RHSScev, E->getType(), IP);

    Inst = BinaryOperator::Create((Instruction::BinaryOps)Inst->getOpcode(),
                                  LHS, RHS, Inst->getName() + Name, IP);
    return SE.getSCEV(Inst);
  }

  /// The following functions will just traverse the SCEV and rebuild it with
  /// the new operands returned by the traversal.
  ///
  ///{
  const SCEV *visitConstant(const SCEVConstant *E) { return E; }
  const SCEV *visitTruncateExpr(const SCEVTruncateExpr *E) {
    return SE.getTruncateExpr(visit(E->getOperand()), E->getType());
  }
  const SCEV *visitZeroExtendExpr(const SCEVZeroExtendExpr *E) {
    return SE.getZeroExtendExpr(visit(E->getOperand()), E->getType());
  }
  const SCEV *visitSignExtendExpr(const SCEVSignExtendExpr *E) {
    return SE.getSignExtendExpr(visit(E->getOperand()), E->getType());
  }
  const SCEV *visitUDivExpr(const SCEVUDivExpr *E) {
    auto *RHSScev = visit(E->getRHS());
    if (!SE.isKnownNonZero(RHSScev))
      RHSScev = SE.getUMaxExpr(RHSScev, SE.getConstant(E->getType(), 1));
    return SE.getUDivExpr(visit(E->getLHS()), RHSScev);
  }
  const SCEV *visitAddExpr(const SCEVAddExpr *E) {
    SmallVector<const SCEV *, 4> NewOps;
    for (const SCEV *Op : E->operands())
      NewOps.push_back(visit(Op));
    return SE.getAddExpr(NewOps);
  }
  const SCEV *visitMulExpr(const SCEVMulExpr *E) {
    SmallVector<const SCEV *, 4> NewOps;
    for (const SCEV *Op : E->operands())
      NewOps.push_back(visit(Op));
    return SE.getMulExpr(NewOps);
  }
  const SCEV *visitUMaxExpr(const SCEVUMaxExpr *E) {
    SmallVector<const SCEV *, 4> NewOps;
    for (const SCEV *Op : E->operands())
      NewOps.push_back(visit(Op));
    return SE.getUMaxExpr(NewOps);
  }
  const SCEV *visitSMaxExpr(const SCEVSMaxExpr *E) {
    SmallVector<const SCEV *, 4> NewOps;
    for (const SCEV *Op : E->operands())
      NewOps.push_back(visit(Op));
    return SE.getSMaxExpr(NewOps);
  }
  const SCEV *visitAddRecExpr(const SCEVAddRecExpr *E) {
    SmallVector<const SCEV *, 4> NewOps;
    for (const SCEV *Op : E->operands())
      NewOps.push_back(visit(Op));
    return SE.getAddRecExpr(NewOps, E->getLoop(), E->getNoWrapFlags());
  }
  ///}
};

Value *polly::expandCodeFor(Scop &S, ScalarEvolution &SE, const DataLayout &DL,
                            const char *Name, const SCEV *E, Type *Ty,
                            Instruction *IP, ValueMapT *VMap,
                            BasicBlock *RTCBB) {
  ScopExpander Expander(S.getRegion(), SE, DL, Name, VMap, RTCBB);
  return Expander.expandCodeFor(E, Ty, IP);
}

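// A minimal sketch of how polly::expandCodeFor can be used when emitting a
// run-time check: the SCEV of a parameter is materialized as an llvm::Value
// in front of the SCoP. The helper name emitBoundBeforeScop, the name prefix
// "polly", and the choice of insertion point are illustrative assumptions.
static Value *emitBoundBeforeScop(Scop &S, ScalarEvolution &SE, const SCEV *E,
                                  BasicBlock *RTCBB) {
  const DataLayout &DL = S.getFunction().getParent()->getDataLayout();
  // Insert the expanded code just before the branch into the region so the
  // value is available to a run-time condition guarding the SCoP.
  Instruction *IP = RTCBB->getTerminator();
  return polly::expandCodeFor(S, SE, DL, "polly", E, E->getType(), IP,
                              /*VMap=*/nullptr, RTCBB);
}
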
bool polly::isErrorBlock(BasicBlock &BB, const Region &R, LoopInfo &LI,
                         const DominatorTree &DT) {
  if (!PollyAllowErrorBlocks)
    return false;

  if (isa<UnreachableInst>(BB.getTerminator()))
    return true;

  if (LI.isLoopHeader(&BB))
    return false;

  // Basic blocks that are always executed are not considered error blocks,
  // as their execution cannot be a rare event.
  bool DominatesAllPredecessors = true;
  if (R.isTopLevelRegion()) {
    for (BasicBlock &I : *R.getEntry()->getParent())
      if (isa<ReturnInst>(I.getTerminator()) && !DT.dominates(&BB, &I))
        DominatesAllPredecessors = false;
  } else {
    for (auto Pred : predecessors(R.getExit()))
      if (R.contains(Pred) && !DT.dominates(&BB, Pred))
        DominatesAllPredecessors = false;
  }

  if (DominatesAllPredecessors)
    return false;

  for (Instruction &Inst : BB)
    if (CallInst *CI = dyn_cast<CallInst>(&Inst)) {
      if (isDebugCall(CI))
        continue;

      if (isIgnoredIntrinsic(CI))
        continue;

      // memset, memcpy and memmove are modeled intrinsics.
      if (isa<MemSetInst>(CI) || isa<MemTransferInst>(CI))
        continue;

      if (!CI->doesNotAccessMemory())
        return true;
      if (CI->doesNotReturn())
        return true;
    }

  return false;
}

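// A small sketch showing the intended use of isErrorBlock during SCoP
// modeling: blocks classified as error blocks are assumed not to be executed
// and can be left out of the model. The function name collectModeledBlocks
// is a hypothetical illustration.
static void collectModeledBlocks(const Region &R, LoopInfo &LI,
                                 const DominatorTree &DT,
                                 SmallVectorImpl<BasicBlock *> &Modeled) {
  for (BasicBlock *BB : R.blocks())
    if (!polly::isErrorBlock(*BB, R, LI, DT))
      Modeled.push_back(BB);
}
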
Value *polly::getConditionFromTerminator(Instruction *TI) {
  if (BranchInst *BR = dyn_cast<BranchInst>(TI)) {
    if (BR->isUnconditional())
      return ConstantInt::getTrue(Type::getInt1Ty(TI->getContext()));

    return BR->getCondition();
  }

  if (SwitchInst *SI = dyn_cast<SwitchInst>(TI))
    return SI->getCondition();

  return nullptr;
}

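// A minimal sketch of a caller: getConditionFromTerminator yields the branch
// or switch condition (a true constant for unconditional branches) and
// nullptr for terminators Polly does not model. The helper name
// isModeledTerminator is an assumption for illustration.
static bool isModeledTerminator(BasicBlock *BB) {
  Value *Cond = polly::getConditionFromTerminator(BB->getTerminator());
  // A null condition indicates an unsupported terminator kind.
  return Cond != nullptr;
}
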
static bool hasVariantIndex(GetElementPtrInst *Gep, Loop *L, Region &R,
                            ScalarEvolution &SE) {
  for (const Use &Val : llvm::drop_begin(Gep->operands(), 1)) {
    const SCEV *PtrSCEV = SE.getSCEVAtScope(Val, L);
    Loop *OuterLoop = R.outermostLoopInRegion(L);
    if (!SE.isLoopInvariant(PtrSCEV, OuterLoop))
      return true;
  }
  return false;
}

bool polly::isHoistableLoad(LoadInst *LInst, Region &R, LoopInfo &LI,
                            ScalarEvolution &SE, const DominatorTree &DT,
                            const InvariantLoadsSetTy &KnownInvariantLoads) {
  Loop *L = LI.getLoopFor(LInst->getParent());
  auto *Ptr = LInst->getPointerOperand();

  // A LoadInst is hoistable if the address it is loading from is also
  // invariant; in this case: another invariant load (whether that address
  // is also not written to has to be checked separately).
  // TODO: This only checks for a LoadInst->GetElementPtrInst->LoadInst
  // pattern generated by the Chapel frontend, but generally this applies
  // for any chain of instructions that does not also depend on any
  // induction variable.
  if (auto *GepInst = dyn_cast<GetElementPtrInst>(Ptr)) {
    if (!hasVariantIndex(GepInst, L, R, SE)) {
      if (auto *DecidingLoad =
              dyn_cast<LoadInst>(GepInst->getPointerOperand())) {
        if (KnownInvariantLoads.count(DecidingLoad))
          return true;
      }
    }
  }

  const SCEV *PtrSCEV = SE.getSCEVAtScope(Ptr, L);
  while (L && R.contains(L)) {
    if (!SE.isLoopInvariant(PtrSCEV, L))
      return false;
    L = L->getParentLoop();
  }

  for (auto *User : Ptr->users()) {
    auto *UserI = dyn_cast<Instruction>(User);
    if (!UserI || !R.contains(UserI))
      continue;
    if (!UserI->mayWriteToMemory())
      continue;

    auto &BB = *UserI->getParent();
    if (DT.dominates(&BB, LInst->getParent()))
      return false;

    bool DominatesAllPredecessors = true;
    if (R.isTopLevelRegion()) {
      for (BasicBlock &I : *R.getEntry()->getParent())
        if (isa<ReturnInst>(I.getTerminator()) && !DT.dominates(&BB, &I))
          DominatesAllPredecessors = false;
    } else {
      for (auto Pred : predecessors(R.getExit()))
        if (R.contains(Pred) && !DT.dominates(&BB, Pred))
          DominatesAllPredecessors = false;
    }

    if (!DominatesAllPredecessors)
      continue;

    return false;
  }

  return true;
}

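// A minimal sketch of the fixed-point use of isHoistableLoad: loads proven
// hoistable feed back into KnownInvariantLoads so that later loads whose
// address depends on them can be classified as well. The driver function
// name and the way the candidate loads are collected are hypothetical.
static void classifyInvariantLoads(ArrayRef<LoadInst *> LoadsInRegion,
                                   Region &R, LoopInfo &LI,
                                   ScalarEvolution &SE,
                                   const DominatorTree &DT,
                                   InvariantLoadsSetTy &KnownInvariantLoads) {
  for (LoadInst *LInst : LoadsInRegion)
    if (polly::isHoistableLoad(LInst, R, LI, SE, DT, KnownInvariantLoads))
      KnownInvariantLoads.insert(LInst);
}
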
bool polly::isIgnoredIntrinsic(const Value *V) {
  if (auto *IT = dyn_cast<IntrinsicInst>(V)) {
    switch (IT->getIntrinsicID()) {
    // Lifetime markers are supported/ignored.
    case llvm::Intrinsic::lifetime_start:
    case llvm::Intrinsic::lifetime_end:
    // Invariant markers are supported/ignored.
    case llvm::Intrinsic::invariant_start:
    case llvm::Intrinsic::invariant_end:
    // Some misc annotations are supported/ignored.
    case llvm::Intrinsic::var_annotation:
    case llvm::Intrinsic::ptr_annotation:
    case llvm::Intrinsic::annotation:
    case llvm::Intrinsic::donothing:
    case llvm::Intrinsic::assume:
    // Some debug info intrinsics are supported/ignored.
    case llvm::Intrinsic::dbg_value:
    case llvm::Intrinsic::dbg_declare:
      return true;
    default:
      break;
    }
  }
  return false;
}

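// Sketch of a typical call site: when scanning a basic block, intrinsics
// recognized by isIgnoredIntrinsic carry no semantics Polly needs to model
// and are simply skipped. The counting loop below is an illustrative
// assumption, not code from this file.
static unsigned countModeledCalls(BasicBlock *BB) {
  unsigned NumCalls = 0;
  for (Instruction &Inst : *BB)
    if (isa<CallInst>(&Inst) && !polly::isIgnoredIntrinsic(&Inst))
      ++NumCalls;
  return NumCalls;
}
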
bool polly::canSynthesize(const Value *V, const Scop &S, ScalarEvolution *SE,
                          Loop *Scope) {
  if (!V || !SE->isSCEVable(V->getType()))
    return false;

  const InvariantLoadsSetTy &ILS = S.getRequiredInvariantLoads();
  if (const SCEV *Scev = SE->getSCEVAtScope(const_cast<Value *>(V), Scope))
    if (!isa<SCEVCouldNotCompute>(Scev))
      if (!hasScalarDepsInsideRegion(Scev, &S.getRegion(), Scope, false, ILS))
        return true;

  return false;
}

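// A minimal sketch of how canSynthesize is typically consulted: values that
// can be recomputed from their SCEV at code-generation time do not need an
// explicit scalar dependence. The helper name needsScalarWrite is an
// illustrative assumption.
static bool needsScalarWrite(const Value *V, const Scop &S,
                             ScalarEvolution *SE, Loop *Scope) {
  // If the value is synthesizable we can rebuild it from parameters and
  // induction variables instead of communicating it through memory.
  return !polly::canSynthesize(V, S, SE, Scope);
}
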
llvm::BasicBlock *polly::getUseBlock(const llvm::Use &U) {
  Instruction *UI = dyn_cast<Instruction>(U.getUser());
  if (!UI)
    return nullptr;

  if (PHINode *PHI = dyn_cast<PHINode>(UI))
    return PHI->getIncomingBlock(U);

  return UI->getParent();
}

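// Sketch of a use: when deciding where an operand is live, PHI uses must be
// attributed to the incoming edge's block rather than to the PHI's block,
// which is exactly what getUseBlock encapsulates. The predicate name below
// is a hypothetical example.
static bool isUsedOutsideRegion(const Region &R, const Use &U) {
  BasicBlock *UseBB = polly::getUseBlock(U);
  return UseBB && !R.contains(UseBB);
}
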
std::tuple<std::vector<const SCEV *>, std::vector<int>>
polly::getIndexExpressionsFromGEP(GetElementPtrInst *GEP, ScalarEvolution &SE) {
  std::vector<const SCEV *> Subscripts;
  std::vector<int> Sizes;

  Type *Ty = GEP->getPointerOperandType();

  bool DroppedFirstDim = false;

  for (unsigned i = 1; i < GEP->getNumOperands(); i++) {

    const SCEV *Expr = SE.getSCEV(GEP->getOperand(i));

    if (i == 1) {
      if (auto *PtrTy = dyn_cast<PointerType>(Ty)) {
        Ty = PtrTy->getElementType();
      } else if (auto *ArrayTy = dyn_cast<ArrayType>(Ty)) {
        Ty = ArrayTy->getElementType();
      } else {
        Subscripts.clear();
        Sizes.clear();
        break;
      }
      if (auto *Const = dyn_cast<SCEVConstant>(Expr))
        if (Const->getValue()->isZero()) {
          DroppedFirstDim = true;
          continue;
        }
      Subscripts.push_back(Expr);
      continue;
    }

    auto *ArrayTy = dyn_cast<ArrayType>(Ty);
    if (!ArrayTy) {
      Subscripts.clear();
      Sizes.clear();
      break;
    }

    Subscripts.push_back(Expr);
    if (!(DroppedFirstDim && i == 2))
      Sizes.push_back(ArrayTy->getNumElements());

    Ty = ArrayTy->getElementType();
  }

  return std::make_tuple(Subscripts, Sizes);
}

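// A minimal sketch of consuming the delinearization result: an empty
// subscript vector signals that the GEP did not match a recognizable
// multi-dimensional array access. The wrapper name is an illustrative
// assumption.
static bool hasAffineIndexStructure(GetElementPtrInst *GEP,
                                    ScalarEvolution &SE) {
  std::vector<const SCEV *> Subscripts;
  std::vector<int> Sizes;
  std::tie(Subscripts, Sizes) = polly::getIndexExpressionsFromGEP(GEP, SE);
  return !Subscripts.empty();
}
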
llvm::Loop *polly::getFirstNonBoxedLoopFor(llvm::Loop *L, llvm::LoopInfo &LI,
                                           const BoxedLoopsSetTy &BoxedLoops) {
  while (BoxedLoops.count(L))
    L = L->getParentLoop();
  return L;
}

llvm::Loop *polly::getFirstNonBoxedLoopFor(llvm::BasicBlock *BB,
                                           llvm::LoopInfo &LI,
                                           const BoxedLoopsSetTy &BoxedLoops) {
  Loop *L = LI.getLoopFor(BB);
  return getFirstNonBoxedLoopFor(L, LI, BoxedLoops);
}

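// Sketch of a call site: boxed loops (loops inside non-affine subregions)
// must be skipped so a statement is associated with the innermost loop Polly
// actually models. The surrounding helper is a hypothetical illustration.
static unsigned getModeledLoopDepth(BasicBlock *BB, LoopInfo &LI,
                                    const BoxedLoopsSetTy &BoxedLoops) {
  Loop *L = polly::getFirstNonBoxedLoopFor(BB, LI, BoxedLoops);
  return L ? L->getLoopDepth() : 0;
}
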
bool polly::isDebugCall(Instruction *Inst) {
  auto *CI = dyn_cast<CallInst>(Inst);
  if (!CI)
    return false;

  Function *CF = CI->getCalledFunction();
  if (!CF)
    return false;

  return std::find(DebugFunctions.begin(), DebugFunctions.end(),
                   CF->getName()) != DebugFunctions.end();
}

static bool hasDebugCall(BasicBlock *BB) {
  for (Instruction &Inst : *BB) {
    if (isDebugCall(&Inst))
      return true;
  }
  return false;
}

bool polly::hasDebugCall(ScopStmt *Stmt) {
  // Quick skip if no debug functions have been defined.
  if (DebugFunctions.empty())
    return false;

  if (!Stmt)
    return false;

  for (Instruction *Inst : Stmt->getInstructions())
    if (isDebugCall(Inst))
      return true;

  if (Stmt->isRegionStmt()) {
    for (BasicBlock *RBB : Stmt->getRegion()->blocks())
      if (RBB != Stmt->getEntryBlock() && ::hasDebugCall(RBB))
        return true;
  }

  return false;
}
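
// A minimal sketch of why hasDebugCall matters to transformations: statements
// containing calls to functions listed via -polly-debug-func must not be
// treated as dead, even if they compute no modeled value. The predicate name
// below is a hypothetical illustration.
static bool canRemoveStmt(ScopStmt *Stmt) {
  // Keep user-requested debug output alive across Polly's transformations.
  if (polly::hasDebugCall(Stmt))
    return false;
  // Further liveness checks would go here.
  return true;
}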