/Users/buildslave/jenkins/workspace/clang-stage2-coverage-R/llvm/tools/polly/lib/Support/VirtualInstruction.cpp
Line | Count | Source
1 | | //===------ VirtualInstruction.cpp ------------------------------*- C++ -*-===// |
2 | | // |
3 | | // Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions. |
4 | | // See https://llvm.org/LICENSE.txt for license information. |
5 | | // SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception |
6 | | // |
7 | | //===----------------------------------------------------------------------===// |
8 | | // |
9 | | // Tools for determining which instructions are within a statement and the |
10 | | // nature of their operands. |
11 | | // |
12 | | //===----------------------------------------------------------------------===// |
13 | | |
14 | | #include "polly/Support/VirtualInstruction.h" |
15 | | |
16 | | using namespace polly; |
17 | | using namespace llvm; |
18 | | |
19 | | VirtualUse VirtualUse::create(Scop *S, const Use &U, LoopInfo *LI, |
20 | 0 | bool Virtual) { |
21 | 0 | auto *UserBB = getUseBlock(U); |
22 | 0 | Loop *UserScope = LI->getLoopFor(UserBB); |
23 | 0 | Instruction *UI = dyn_cast<Instruction>(U.getUser()); |
24 | 0 | ScopStmt *UserStmt = S->getStmtFor(UI); |
25 | 0 |
26 | 0 | // Uses by PHI nodes are always reading values written by other statements, |
27 | 0 | // except when the PHI is inside a region statement.
28 | 0 | if (PHINode *PHI = dyn_cast<PHINode>(UI)) { |
29 | 0 | // Handle PHI in exit block. |
30 | 0 | if (S->getRegion().getExit() == PHI->getParent()) |
31 | 0 | return VirtualUse(UserStmt, U.get(), Inter, nullptr, nullptr); |
32 | 0 | |
33 | 0 | if (UserStmt->getEntryBlock() != PHI->getParent()) |
34 | 0 | return VirtualUse(UserStmt, U.get(), Intra, nullptr, nullptr); |
35 | 0 | |
36 | 0 | // The MemoryAccess is expected to be set if @p Virtual is true. |
37 | 0 | MemoryAccess *IncomingMA = nullptr; |
38 | 0 | if (Virtual) { |
39 | 0 | if (const ScopArrayInfo *SAI = |
40 | 0 | S->getScopArrayInfoOrNull(PHI, MemoryKind::PHI)) { |
41 | 0 | IncomingMA = S->getPHIRead(SAI); |
42 | 0 | assert(IncomingMA->getStatement() == UserStmt); |
43 | 0 | } |
44 | 0 | } |
45 | 0 |
46 | 0 | return VirtualUse(UserStmt, U.get(), Inter, nullptr, IncomingMA); |
47 | 0 | } |
48 | 0 |
49 | 0 | return create(S, UserStmt, UserScope, U.get(), Virtual); |
50 | 0 | } |
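// Descriptive note on the overload above (summary only, no new behavior): a
// PHI use is classified Inter when the PHI lies in its statement's entry
// block (or in the SCoP's exit block), because its incoming values are
// written by the statements of the incoming blocks. A PHI found anywhere else
// can only belong to a region statement, where the incoming values live in
// the same statement, hence Intra. The PHI-read MemoryAccess is attached only
// when @p Virtual is set.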
51 | | |
52 | | VirtualUse VirtualUse::create(Scop *S, ScopStmt *UserStmt, Loop *UserScope, |
53 | 17.8k | Value *Val, bool Virtual) { |
54 | 17.8k | assert(!isa<StoreInst>(Val) && "a StoreInst cannot be used"); |
55 | 17.8k | |
56 | 17.8k | if (isa<BasicBlock>(Val)) |
57 | 670 | return VirtualUse(UserStmt, Val, Block, nullptr, nullptr); |
58 | 17.1k | |
59 | 17.1k | if (isa<llvm::Constant>(Val) || isa<MetadataAsValue>(Val))
60 | 3.82k | return VirtualUse(UserStmt, Val, Constant, nullptr, nullptr); |
61 | 13.3k | |
62 | 13.3k | // Is the value synthesizable? If the user has been pruned |
63 | 13.3k | // (UserStmt == nullptr), it is either not used anywhere or is synthesizable. |
64 | 13.3k | // We assume it is synthesizable, which in practice has the same effect.
65 | 13.3k | auto *SE = S->getSE(); |
66 | 13.3k | if (SE->isSCEVable(Val->getType())) { |
67 | 10.5k | auto *ScevExpr = SE->getSCEVAtScope(Val, UserScope); |
68 | 10.5k | if (!UserStmt || canSynthesize(Val, *UserStmt->getParent(), SE, UserScope)) |
69 | 7.79k | return VirtualUse(UserStmt, Val, Synthesizable, ScevExpr, nullptr); |
70 | 5.53k | } |
71 | 5.53k | |
72 | 5.53k | // FIXME: Inconsistency between lookupInvariantEquivClass and |
73 | 5.53k | // getRequiredInvariantLoads. Querying one of them should be enough. |
74 | 5.53k | auto &RIL = S->getRequiredInvariantLoads(); |
75 | 5.53k | if (S->lookupInvariantEquivClass(Val) || RIL.count(dyn_cast<LoadInst>(Val)))
76 | 47 | return VirtualUse(UserStmt, Val, Hoisted, nullptr, nullptr); |
77 | 5.48k | |
78 | 5.48k | // ReadOnly uses may have MemoryAccesses that we want to associate with the |
79 | 5.48k | // use. This is why we look for a MemoryAccess here already. |
80 | 5.48k | MemoryAccess *InputMA = nullptr; |
81 | 5.48k | if (UserStmt && Virtual) |
82 | 1.48k | InputMA = UserStmt->lookupValueReadOf(Val); |
83 | 5.48k | |
84 | 5.48k | // Uses are read-only if they have been defined before the SCoP, i.e., they |
85 | 5.48k | // cannot be written to inside the SCoP. Arguments are defined before any |
86 | 5.48k | // instructions, hence also before the SCoP. If the user has been pruned |
87 | 5.48k | // (UserStmt == nullptr) and is not SCEVable, assume it is read-only as it is |
88 | 5.48k | // neither an intra- nor an inter-use. |
89 | 5.48k | if (!UserStmt || isa<Argument>(Val)) |
90 | 84 | return VirtualUse(UserStmt, Val, ReadOnly, nullptr, InputMA); |
91 | 5.40k | |
92 | 5.40k | auto Inst = cast<Instruction>(Val); |
93 | 5.40k | if (!S->contains(Inst)) |
94 | 15 | return VirtualUse(UserStmt, Val, ReadOnly, nullptr, InputMA); |
95 | 5.38k | |
96 | 5.38k | // A use is inter-statement if either it is defined in another statement, or |
97 | 5.38k | // there is a MemoryAccess that reads its value that has been written by |
98 | 5.38k | // another statement. |
99 | 5.38k | if (InputMA || (!Virtual && UserStmt != S->getStmtFor(Inst)))
100 | 581 | return VirtualUse(UserStmt, Val, Inter, nullptr, InputMA); |
101 | 4.80k | |
102 | 4.80k | return VirtualUse(UserStmt, Val, Intra, nullptr, nullptr); |
103 | 4.80k | } |
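// Summary of the classification cascade above, with a small hypothetical
// example (illustrative only): an operand is tested in order as Block,
// Constant, Synthesizable (ScalarEvolution can recompute it), Hoisted
// (invariant load), ReadOnly (defined before the SCoP), and finally Inter or
// Intra depending on whether its defining instruction lives in another
// statement. For a loop such as
//   for (i = 0; i < n; i++)  S: A[i] = x + i;
// the induction variable i is Synthesizable, a function argument x is
// ReadOnly (or Hoisted if it is an invariant load inside the SCoP), and the
// add feeding the store is an Intra use within statement S.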
104 | | |
105 | 0 | void VirtualUse::print(raw_ostream &OS, bool Reproducible) const { |
106 | 0 | OS << "User: [" << User->getBaseName() << "] "; |
107 | 0 | switch (Kind) { |
108 | 0 | case VirtualUse::Constant: |
109 | 0 | OS << "Constant Op:"; |
110 | 0 | break; |
111 | 0 | case VirtualUse::Block: |
112 | 0 | OS << "BasicBlock Op:"; |
113 | 0 | break; |
114 | 0 | case VirtualUse::Synthesizable: |
115 | 0 | OS << "Synthesizable Op:"; |
116 | 0 | break; |
117 | 0 | case VirtualUse::Hoisted: |
118 | 0 | OS << "Hoisted load Op:"; |
119 | 0 | break; |
120 | 0 | case VirtualUse::ReadOnly: |
121 | 0 | OS << "Read-Only Op:"; |
122 | 0 | break; |
123 | 0 | case VirtualUse::Intra: |
124 | 0 | OS << "Intra Op:"; |
125 | 0 | break; |
126 | 0 | case VirtualUse::Inter: |
127 | 0 | OS << "Inter Op:"; |
128 | 0 | break; |
129 | 0 | } |
130 | 0 | |
131 | 0 | if (Val) { |
132 | 0 | OS << ' '; |
133 | 0 | if (Reproducible) |
134 | 0 | OS << '"' << Val->getName() << '"'; |
135 | 0 | else |
136 | 0 | Val->print(OS, true); |
137 | 0 | } |
138 | 0 | if (ScevExpr) { |
139 | 0 | OS << ' '; |
140 | 0 | ScevExpr->print(OS); |
141 | 0 | } |
142 | 0 | if (InputMA && !Reproducible) |
143 | 0 | OS << ' ' << InputMA; |
144 | 0 | } |
145 | | |
146 | | #if !defined(NDEBUG) || defined(LLVM_ENABLE_DUMP) |
147 | | LLVM_DUMP_METHOD void VirtualUse::dump() const { |
148 | | print(errs(), false); |
149 | | errs() << '\n'; |
150 | | } |
151 | | #endif |
152 | | |
153 | 0 | void VirtualInstruction::print(raw_ostream &OS, bool Reproducible) const { |
154 | 0 | if (!Stmt || !Inst) { |
155 | 0 | OS << "[null VirtualInstruction]"; |
156 | 0 | return; |
157 | 0 | } |
158 | 0 | |
159 | 0 | OS << "[" << Stmt->getBaseName() << "]"; |
160 | 0 | Inst->print(OS, !Reproducible); |
161 | 0 | } |
162 | | |
163 | | #if !defined(NDEBUG) || defined(LLVM_ENABLE_DUMP) |
164 | | LLVM_DUMP_METHOD void VirtualInstruction::dump() const { |
165 | | print(errs(), false); |
166 | | errs() << '\n'; |
167 | | } |
168 | | #endif |
169 | | |
170 | | /// Return true if @p Inst cannot be removed, even if it is nowhere referenced. |
171 | 193 | static bool isRoot(const Instruction *Inst) { |
172 | 193 | // The store is handled by its MemoryAccess. The load must be reached from the |
173 | 193 | // roots in order to be marked as used. |
174 | 193 | if (isa<LoadInst>(Inst) || isa<StoreInst>(Inst))
175 | 162 | return false; |
176 | 31 | |
177 | 31 | // Terminator instructions (in region statements) are required for control |
178 | 31 | // flow. |
179 | 31 | if (Inst->isTerminator()) |
180 | 0 | return true; |
181 | 31 | |
182 | 31 | // Writes to memory must be honored. |
183 | 31 | if (Inst->mayWriteToMemory()) |
184 | 0 | return true; |
185 | 31 | |
186 | 31 | return false; |
187 | 31 | } |
188 | | |
189 | | /// Return true for a MemoryAccess that cannot be removed because it represents
190 | | /// an llvm::Value that is used after the SCoP. |
191 | 9 | static bool isEscaping(MemoryAccess *MA) { |
192 | 9 | assert(MA->isOriginalValueKind()); |
193 | 9 | Scop *S = MA->getStatement()->getParent(); |
194 | 9 | return S->isEscaping(cast<Instruction>(MA->getAccessValue())); |
195 | 9 | } |
196 | | |
197 | | /// Add non-removable virtual instructions in @p Stmt to @p RootInsts. |
198 | | static void |
199 | | addInstructionRoots(ScopStmt *Stmt, |
200 | 98 | SmallVectorImpl<VirtualInstruction> &RootInsts) { |
201 | 98 | if (!Stmt->isBlockStmt()) { |
202 | 9 | // In region statements the terminator instruction and all instructions that
203 | 9 | // are not in the entry block cannot be eliminated and consequently must |
204 | 9 | // be roots. |
205 | 9 | RootInsts.emplace_back(Stmt, |
206 | 9 | Stmt->getRegion()->getEntry()->getTerminator()); |
207 | 9 | for (BasicBlock *BB : Stmt->getRegion()->blocks()) |
208 | 25 | if (Stmt->getRegion()->getEntry() != BB) |
209 | 16 | for (Instruction &Inst : *BB) |
210 | 26 | RootInsts.emplace_back(Stmt, &Inst); |
211 | 9 | return; |
212 | 9 | } |
213 | 89 | |
214 | 89 | for (Instruction *Inst : Stmt->getInstructions()) |
215 | 193 | if (isRoot(Inst)) |
216 | 0 | RootInsts.emplace_back(Stmt, Inst); |
217 | 89 | } |
218 | | |
219 | | /// Add non-removable memory accesses in @p Stmt to @p RootAccs.
220 | | /// |
221 | | /// @param Local If true, all writes are assumed to escape. markAndSweep |
222 | | /// algorithms can use this to be applicable to a single ScopStmt only without |
223 | | /// the risk of removing definitions required by other statements. |
224 | | /// If false, only writes for SCoP-escaping values are roots. This |
225 | | /// is global mode, where such writes must be marked by their uses
226 | | /// in order to be reachable. |
227 | | static void addAccessRoots(ScopStmt *Stmt, |
228 | | SmallVectorImpl<MemoryAccess *> &RootAccs, |
229 | 98 | bool Local) { |
230 | 206 | for (auto *MA : *Stmt) { |
231 | 206 | if (!MA->isWrite()) |
232 | 80 | continue; |
233 | 126 | |
234 | 126 | // Writes to arrays are always used. |
235 | 126 | if (MA->isLatestArrayKind()) |
236 | 113 | RootAccs.push_back(MA); |
237 | 13 | |
238 | 13 | // Values are roots if they are escaping. |
239 | 13 | else if (MA->isLatestValueKind()) { |
240 | 9 | if (Local || isEscaping(MA)) |
241 | 3 | RootAccs.push_back(MA); |
242 | 9 | } |
243 | 4 | |
244 | 4 | // Exit phis are, by definition, escaping. |
245 | 4 | else if (MA->isLatestExitPHIKind()) |
246 | 0 | RootAccs.push_back(MA); |
247 | 4 | |
248 | 4 | // PHI writes are only roots if we are not visiting the statement
249 | 4 | // containing the PHINode. |
250 | 4 | else if (Local && MA->isLatestPHIKind())
251 | 0 | RootAccs.push_back(MA); |
252 | 126 | } |
253 | 98 | } |
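// Descriptive note: with @p Local set, every write in the statement becomes a
// root, so a per-statement sweep cannot drop a definition that another
// statement may still read. In global mode only array writes, writes of
// escaping values, and exit-PHI writes start as roots; every other write must
// be reached through one of its uses during the walk.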
254 | | |
255 | | /// Determine all instruction and access roots. |
256 | | static void addRoots(ScopStmt *Stmt, |
257 | | SmallVectorImpl<VirtualInstruction> &RootInsts, |
258 | 98 | SmallVectorImpl<MemoryAccess *> &RootAccs, bool Local) { |
259 | 98 | addInstructionRoots(Stmt, RootInsts); |
260 | 98 | addAccessRoots(Stmt, RootAccs, Local); |
261 | 98 | } |
262 | | |
263 | | /// Mark accesses and instructions as used if they are reachable from a root, |
264 | | /// walking the operand trees. |
265 | | /// |
266 | | /// @param S The SCoP to walk. |
267 | | /// @param LI The LoopInfo Analysis. |
268 | | /// @param RootInsts List of root instructions. |
269 | | /// @param RootAccs List of root accesses. |
270 | | /// @param UsedInsts[out] Receives all reachable instructions, including the
271 | | /// roots. |
272 | | /// @param UsedAccs[out] Receives all reachable accesses, including the roots. |
273 | | /// @param OnlyLocal If non-nullptr, restricts walking to a single |
274 | | /// statement. |
275 | | static void walkReachable(Scop *S, LoopInfo *LI, |
276 | | ArrayRef<VirtualInstruction> RootInsts, |
277 | | ArrayRef<MemoryAccess *> RootAccs, |
278 | | DenseSet<VirtualInstruction> &UsedInsts, |
279 | | DenseSet<MemoryAccess *> &UsedAccs, |
280 | 44 | ScopStmt *OnlyLocal = nullptr) { |
281 | 44 | UsedInsts.clear(); |
282 | 44 | UsedAccs.clear(); |
283 | 44 | |
284 | 44 | SmallVector<VirtualInstruction, 32> WorklistInsts; |
285 | 44 | SmallVector<MemoryAccess *, 32> WorklistAccs; |
286 | 44 | |
287 | 44 | WorklistInsts.append(RootInsts.begin(), RootInsts.end()); |
288 | 44 | WorklistAccs.append(RootAccs.begin(), RootAccs.end()); |
289 | 44 | |
290 | 483 | auto AddToWorklist = [&](VirtualUse VUse) { |
291 | 483 | switch (VUse.getKind()) { |
292 | 483 | case VirtualUse::Block: |
293 | 286 | case VirtualUse::Constant: |
294 | 286 | case VirtualUse::Synthesizable: |
295 | 286 | case VirtualUse::Hoisted: |
296 | 286 | break; |
297 | 286 | case VirtualUse::ReadOnly: |
298 | 2 | // Read-only scalars only have MemoryAccesses if ModelReadOnlyScalars is |
299 | 2 | // enabled. |
300 | 2 | if (!VUse.getMemoryAccess()) |
301 | 0 | break; |
302 | 2 | LLVM_FALLTHROUGH; |
303 | 17 | case VirtualUse::Inter: |
304 | 17 | assert(VUse.getMemoryAccess()); |
305 | 17 | WorklistAccs.push_back(VUse.getMemoryAccess()); |
306 | 17 | break; |
307 | 180 | case VirtualUse::Intra: |
308 | 180 | WorklistInsts.emplace_back(VUse.getUser(), |
309 | 180 | cast<Instruction>(VUse.getValue())); |
310 | 180 | break; |
311 | 483 | } |
312 | 483 | }; |
313 | 44 | |
314 | 365 | while (true) { |
315 | 365 | // We have two worklists to process: only when the MemoryAccess worklist is
316 | 365 | // empty do we process the instruction worklist.
317 | 365 | |
318 | 659 | while (!WorklistAccs.empty()) { |
319 | 294 | auto *Acc = WorklistAccs.pop_back_val(); |
320 | 294 | |
321 | 294 | ScopStmt *Stmt = Acc->getStatement(); |
322 | 294 | if (OnlyLocal && Stmt != OnlyLocal)
323 | 0 | continue; |
324 | 294 | |
325 | 294 | auto Inserted = UsedAccs.insert(Acc); |
326 | 294 | if (!Inserted.second) |
327 | 112 | continue; |
328 | 182 | |
329 | 182 | if (Acc->isRead()) { |
330 | 60 | const ScopArrayInfo *SAI = Acc->getScopArrayInfo(); |
331 | 60 | |
332 | 60 | if (Acc->isLatestValueKind()) { |
333 | 7 | MemoryAccess *DefAcc = S->getValueDef(SAI); |
334 | 7 | |
335 | 7 | // Accesses to read-only values do not have a definition. |
336 | 7 | if (DefAcc) |
337 | 5 | WorklistAccs.push_back(S->getValueDef(SAI)); |
338 | 7 | } |
339 | 60 | |
340 | 60 | if (Acc->isLatestAnyPHIKind()) { |
341 | 2 | auto IncomingMAs = S->getPHIIncomings(SAI); |
342 | 2 | WorklistAccs.append(IncomingMAs.begin(), IncomingMAs.end()); |
343 | 2 | } |
344 | 60 | } |
345 | 182 | |
346 | 182 | if (Acc->isWrite()) { |
347 | 122 | if (Acc->isOriginalValueKind() || |
348 | 122 | (Acc->isOriginalArrayKind() && Acc->getAccessValue())) {
349 | 118 | Loop *Scope = Stmt->getSurroundingLoop(); |
350 | 118 | VirtualUse VUse = |
351 | 118 | VirtualUse::create(S, Stmt, Scope, Acc->getAccessValue(), true); |
352 | 118 | AddToWorklist(VUse); |
353 | 118 | } |
354 | 122 | |
355 | 122 | if (Acc->isOriginalAnyPHIKind()) { |
356 | 5 | for (auto Incoming : Acc->getIncoming()) { |
357 | 5 | VirtualUse VUse = VirtualUse::create( |
358 | 5 | S, Stmt, LI->getLoopFor(Incoming.first), Incoming.second, true); |
359 | 5 | AddToWorklist(VUse); |
360 | 5 | } |
361 | 4 | } |
362 | 122 | |
363 | 122 | if (Acc->isOriginalArrayKind()) |
364 | 106 | WorklistInsts.emplace_back(Stmt, Acc->getAccessInstruction()); |
365 | 122 | } |
366 | 182 | } |
367 | 365 | |
368 | 365 | // If both worklists are empty, stop walking. |
369 | 365 | if (WorklistInsts.empty()) |
370 | 44 | break; |
371 | 321 | |
372 | 321 | VirtualInstruction VInst = WorklistInsts.pop_back_val(); |
373 | 321 | ScopStmt *Stmt = VInst.getStmt(); |
374 | 321 | Instruction *Inst = VInst.getInstruction(); |
375 | 321 | |
376 | 321 | // Do not process statements other than the local. |
377 | 321 | if (OnlyLocal && Stmt != OnlyLocal)
378 | 0 | continue; |
379 | 321 | |
380 | 321 | auto InsertResult = UsedInsts.insert(VInst); |
381 | 321 | if (!InsertResult.second) |
382 | 113 | continue; |
383 | 208 | |
384 | 208 | // Add all operands to the worklists. |
385 | 208 | PHINode *PHI = dyn_cast<PHINode>(Inst); |
386 | 208 | if (PHI && PHI->getParent() == Stmt->getEntryBlock()) {
387 | 7 | if (MemoryAccess *PHIRead = Stmt->lookupPHIReadOf(PHI)) |
388 | 7 | WorklistAccs.push_back(PHIRead); |
389 | 201 | } else { |
390 | 201 | for (VirtualUse VUse : VInst.operands()) |
391 | 360 | AddToWorklist(VUse); |
392 | 201 | } |
393 | 208 | |
394 | 208 | // If there is an array access, also add its MemoryAccesses to the worklist. |
395 | 208 | const MemoryAccessList *Accs = Stmt->lookupArrayAccessesFor(Inst); |
396 | 208 | if (!Accs) |
397 | 61 | continue; |
398 | 147 | |
399 | 147 | for (MemoryAccess *Acc : *Accs) |
400 | 147 | WorklistAccs.push_back(Acc); |
401 | 147 | } |
402 | 44 | } |
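// Descriptive note on the traversal above: the access worklist is always
// drained before the next instruction is processed. A scalar read pulls in
// the access that defines it (or all incoming writes for a PHI); a write
// pulls in the operand tree of the written value and, for array accesses,
// the accessing instruction itself. Instruction operands are classified via
// VirtualUse and fed back into the two worklists until both are empty.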
403 | | |
404 | | void polly::markReachable(Scop *S, LoopInfo *LI, |
405 | | DenseSet<VirtualInstruction> &UsedInsts, |
406 | | DenseSet<MemoryAccess *> &UsedAccs, |
407 | 44 | ScopStmt *OnlyLocal) { |
408 | 44 | SmallVector<VirtualInstruction, 32> RootInsts; |
409 | 44 | SmallVector<MemoryAccess *, 32> RootAccs; |
410 | 44 | |
411 | 44 | if (OnlyLocal) { |
412 | 0 | addRoots(OnlyLocal, RootInsts, RootAccs, true); |
413 | 44 | } else { |
414 | 44 | for (auto &Stmt : *S) |
415 | 98 | addRoots(&Stmt, RootInsts, RootAccs, false); |
416 | 44 | } |
417 | 44 | |
418 | 44 | walkReachable(S, LI, RootInsts, RootAccs, UsedInsts, UsedAccs, OnlyLocal); |
419 | 44 | } |
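// Minimal usage sketch (hypothetical caller, not part of this file): a
// mark-and-sweep style cleanup could compute the reachable sets once and
// treat everything outside them as removable, e.g.
//
//   DenseSet<VirtualInstruction> UsedInsts;
//   DenseSet<MemoryAccess *> UsedAccs;
//   polly::markReachable(&S, &LI, UsedInsts, UsedAccs, /*OnlyLocal=*/nullptr);
//   for (ScopStmt &Stmt : S)
//     for (MemoryAccess *MA : Stmt)
//       if (!UsedAccs.count(MA))
//         ; // MA was not reached from any root and is a removal candidate.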