/Users/buildslave/jenkins/workspace/clang-stage2-coverage-R/llvm/lib/IR/Value.cpp
Line | Count | Source (jump to first uncovered line) |
1 | | //===-- Value.cpp - Implement the Value class -----------------------------===// |
2 | | // |
3 | | // Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions. |
4 | | // See https://llvm.org/LICENSE.txt for license information. |
5 | | // SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception |
6 | | // |
7 | | //===----------------------------------------------------------------------===// |
8 | | // |
9 | | // This file implements the Value, ValueHandle, and User classes. |
10 | | // |
11 | | //===----------------------------------------------------------------------===// |
12 | | |
13 | | #include "llvm/IR/Value.h" |
14 | | #include "LLVMContextImpl.h" |
15 | | #include "llvm/ADT/DenseMap.h" |
16 | | #include "llvm/ADT/SmallString.h" |
17 | | #include "llvm/ADT/SetVector.h" |
18 | | #include "llvm/IR/Constant.h" |
19 | | #include "llvm/IR/Constants.h" |
20 | | #include "llvm/IR/DataLayout.h" |
21 | | #include "llvm/IR/DerivedTypes.h" |
22 | | #include "llvm/IR/DerivedUser.h" |
23 | | #include "llvm/IR/GetElementPtrTypeIterator.h" |
24 | | #include "llvm/IR/InstrTypes.h" |
25 | | #include "llvm/IR/Instructions.h" |
26 | | #include "llvm/IR/IntrinsicInst.h" |
27 | | #include "llvm/IR/Module.h" |
28 | | #include "llvm/IR/Operator.h" |
29 | | #include "llvm/IR/Statepoint.h" |
30 | | #include "llvm/IR/ValueHandle.h" |
31 | | #include "llvm/IR/ValueSymbolTable.h" |
32 | | #include "llvm/Support/Debug.h" |
33 | | #include "llvm/Support/ErrorHandling.h" |
34 | | #include "llvm/Support/ManagedStatic.h" |
35 | | #include "llvm/Support/raw_ostream.h" |
36 | | #include <algorithm> |
37 | | |
38 | | using namespace llvm; |
39 | | |
40 | | static cl::opt<unsigned> NonGlobalValueMaxNameSize( |
41 | | "non-global-value-max-name-size", cl::Hidden, cl::init(1024), |
42 | | cl::desc("Maximum size for the name of non-global values.")); |
43 | | |
44 | | //===----------------------------------------------------------------------===// |
45 | | // Value Class |
46 | | //===----------------------------------------------------------------------===// |
47 | 81.2M | static inline Type *checkType(Type *Ty) { |
48 | 81.2M | assert(Ty && "Value defined with a null type: Error!"); |
49 | 81.2M | return Ty; |
50 | 81.2M | } |
51 | | |
52 | | Value::Value(Type *ty, unsigned scid) |
53 | | : VTy(checkType(ty)), UseList(nullptr), SubclassID(scid), |
54 | | HasValueHandle(0), SubclassOptionalData(0), SubclassData(0), |
55 | 81.2M | NumUserOperands(0), IsUsedByMD(false), HasName(false) { |
56 | 81.2M | static_assert(ConstantFirstVal == 0, "!(SubclassID < ConstantFirstVal)"); |
57 | 81.2M | // FIXME: Why isn't this in the subclass gunk?? |
58 | 81.2M | // Note, we cannot call isa<CallInst> before the CallInst has been |
59 | 81.2M | // constructed. |
60 | 81.2M | if (SubclassID == Instruction::Call || SubclassID == Instruction::Invoke73.3M || |
61 | 81.2M | SubclassID == Instruction::CallBr72.5M ) |
62 | 81.2M | assert((VTy->isFirstClassType() || VTy->isVoidTy() || VTy->isStructTy()) && |
63 | 81.2M | "invalid CallInst type!"); |
64 | 81.2M | else if (72.1M SubclassID != BasicBlockVal72.1M && |
65 | 72.1M | (/*SubclassID < ConstantFirstVal ||*/ SubclassID > ConstantLastVal)64.6M ) |
66 | 72.1M | assert((VTy->isFirstClassType() || VTy->isVoidTy()) && |
67 | 81.2M | "Cannot create non-first-class values except for constants!"); |
68 | 81.2M | static_assert(sizeof(Value) == 2 * sizeof(void *) + 2 * sizeof(unsigned), |
69 | 81.2M | "Value too big"); |
70 | 81.2M | } |
71 | | |
72 | 59.2M | Value::~Value() { |
73 | 59.2M | // Notify all ValueHandles (if present) that this value is going away. |
74 | 59.2M | if (HasValueHandle) |
75 | 1.56M | ValueHandleBase::ValueIsDeleted(this); |
76 | 59.2M | if (isUsedByMetadata()) |
77 | 97.5k | ValueAsMetadata::handleDeletion(this); |
78 | 59.2M | |
79 | | #ifndef NDEBUG // Only in -g mode... |
80 | | // Check to make sure that there are no uses of this value that are still |
81 | | // around when the value is destroyed. If there are, then we have a dangling |
82 | | // reference and something is wrong. This code is here to print out where |
83 | | // the value is still being referenced. |
84 | | // |
85 | | if (!use_empty()) { |
86 | | dbgs() << "While deleting: " << *VTy << " %" << getName() << "\n"; |
87 | | for (auto *U : users()) |
88 | | dbgs() << "Use still stuck around after Def is destroyed:" << *U << "\n"; |
89 | | } |
90 | | #endif |
91 | | assert(use_empty() && "Uses remain when a value is destroyed!"); |
92 | 59.2M | |
93 | 59.2M | // If this value is named, destroy the name. This should not be in a symtab |
94 | 59.2M | // at this point. |
95 | 59.2M | destroyValueName(); |
96 | 59.2M | } |
97 | | |
98 | 49.1M | void Value::deleteValue() { |
99 | 49.1M | switch (getValueID()) { |
100 | 49.1M | #define HANDLE_VALUE(Name) \ |
101 | 49.1M | case Value::Name##Val: \ |
102 | 274k | delete static_cast<Name *>(this); \ |
103 | 274k | break; |
104 | 49.1M | #define HANDLE_MEMORY_VALUE(Name) \ |
105 | 49.1M | case Value::Name##Val: \ |
106 | 10.5M | static_cast<DerivedUser *>(this)->DeleteValue( \ |
107 | 10.5M | static_cast<DerivedUser *>(this)); \ |
108 | 10.5M | break; |
109 | 49.1M | #define HANDLE_INSTRUCTION(Name) /* nothing */ |
110 | 49.1M | #include "llvm/IR/Value.def" |
111 | 49.1M | |
112 | 49.1M | #define HANDLE_INST(N, OPC, CLASS) \ |
113 | 49.1M | case Value::InstructionVal + Instruction::OPC: \ |
114 | 38.4M | delete static_cast<CLASS *>(this); \ |
115 | 38.4M | break; |
116 | 49.1M | #define HANDLE_USER_INST(N, OPC, CLASS) |
117 | 49.1M | #include "llvm/IR/Instruction.def"1.22M |
118 | 49.1M | |
119 | 49.1M | default: |
120 | 0 | llvm_unreachable("attempting to delete unknown value kind"); |
121 | 49.1M | } |
122 | 49.1M | } |
123 | | |
124 | 62.7M | void Value::destroyValueName() { |
125 | 62.7M | ValueName *Name = getValueName(); |
126 | 62.7M | if (Name) |
127 | 10.9M | Name->Destroy(); |
128 | 62.7M | setValueName(nullptr); |
129 | 62.7M | } |
130 | | |
131 | 1.11M | bool Value::hasNUses(unsigned N) const { |
132 | 1.11M | return hasNItems(use_begin(), use_end(), N); |
133 | 1.11M | } |
134 | | |
135 | 60.9k | bool Value::hasNUsesOrMore(unsigned N) const { |
136 | 60.9k | return hasNItemsOrMore(use_begin(), use_end(), N); |
137 | 60.9k | } |
138 | | |
139 | 2.38M | bool Value::isUsedInBasicBlock(const BasicBlock *BB) const { |
140 | 2.38M | // This can be computed either by scanning the instructions in BB, or by |
141 | 2.38M | // scanning the use list of this Value. Both lists can be very long, but |
142 | 2.38M | // usually one is quite short. |
143 | 2.38M | // |
144 | 2.38M | // Scan both lists simultaneously until one is exhausted. This limits the |
145 | 2.38M | // search to the shorter list. |
146 | 2.38M | BasicBlock::const_iterator BI = BB->begin(), BE = BB->end(); |
147 | 2.38M | const_user_iterator UI = user_begin(), UE = user_end(); |
148 | 4.26M | for (; BI != BE && UI != UE4.14M ; ++BI, ++UI1.87M ) { |
149 | 4.01M | // Scan basic block: Check if this Value is used by the instruction at BI. |
150 | 4.01M | if (is_contained(BI->operands(), this)) |
151 | 389k | return true; |
152 | 3.62M | // Scan use list: Check if the use at UI is in BB. |
153 | 3.62M | const auto *User = dyn_cast<Instruction>(*UI); |
154 | 3.62M | if (User && User->getParent() == BB) |
155 | 1.74M | return true; |
156 | 3.62M | } |
157 | 2.38M | return false251k ; |
158 | 2.38M | } |
159 | | |
160 | 1.74k | unsigned Value::getNumUses() const { |
161 | 1.74k | return (unsigned)std::distance(use_begin(), use_end()); |
162 | 1.74k | } |
163 | | |
164 | 18.1M | static bool getSymTab(Value *V, ValueSymbolTable *&ST) { |
165 | 18.1M | ST = nullptr; |
166 | 18.1M | if (Instruction *I = dyn_cast<Instruction>(V)) { |
167 | 10.0M | if (BasicBlock *P = I->getParent()) |
168 | 9.15M | if (Function *PP = P->getParent()) |
169 | 9.15M | ST = PP->getValueSymbolTable(); |
170 | 10.0M | } else if (BasicBlock *8.17M BB8.17M = dyn_cast<BasicBlock>(V)) { |
171 | 3.13M | if (Function *P = BB->getParent()) |
172 | 2.69M | ST = P->getValueSymbolTable(); |
173 | 5.04M | } else if (GlobalValue *GV = dyn_cast<GlobalValue>(V)) { |
174 | 2.81M | if (Module *P = GV->getParent()) |
175 | 1.45M | ST = &P->getValueSymbolTable(); |
176 | 2.81M | } else if (Argument *2.22M A2.22M = dyn_cast<Argument>(V)) { |
177 | 2.22M | if (Function *P = A->getParent()) |
178 | 2.20M | ST = P->getValueSymbolTable(); |
179 | 2.22M | } else { |
180 | 1.31k | assert(isa<Constant>(V) && "Unknown value type!"); |
181 | 1.31k | return true; // no name is setable for this. |
182 | 1.31k | } |
183 | 18.1M | return false; |
184 | 18.1M | } |
185 | | |
186 | 557M | ValueName *Value::getValueName() const { |
187 | 557M | if (!HasName) return nullptr51.7M ; |
188 | 506M | |
189 | 506M | LLVMContext &Ctx = getContext(); |
190 | 506M | auto I = Ctx.pImpl->ValueNames.find(this); |
191 | 506M | assert(I != Ctx.pImpl->ValueNames.end() && |
192 | 506M | "No name entry found!"); |
193 | 506M | |
194 | 506M | return I->second; |
195 | 506M | } |
196 | | |
197 | 80.2M | void Value::setValueName(ValueName *VN) { |
198 | 80.2M | LLVMContext &Ctx = getContext(); |
199 | 80.2M | |
200 | 80.2M | assert(HasName == Ctx.pImpl->ValueNames.count(this) && |
201 | 80.2M | "HasName bit out of sync!"); |
202 | 80.2M | |
203 | 80.2M | if (!VN) { |
204 | 62.9M | if (HasName) |
205 | 11.1M | Ctx.pImpl->ValueNames.erase(this); |
206 | 62.9M | HasName = false; |
207 | 62.9M | return; |
208 | 62.9M | } |
209 | 17.3M | |
210 | 17.3M | HasName = true; |
211 | 17.3M | Ctx.pImpl->ValueNames[this] = VN; |
212 | 17.3M | } |
213 | | |
214 | 512M | StringRef Value::getName() const { |
215 | 512M | // Make sure the empty string is still a C string. For historical reasons, |
216 | 512M | // some clients want to call .data() on the result and expect it to be null |
217 | 512M | // terminated. |
218 | 512M | if (!hasName()) |
219 | 34.1M | return StringRef("", 0); |
220 | 478M | return getValueName()->getKey(); |
221 | 478M | } |
222 | | |
223 | 82.0M | void Value::setNameImpl(const Twine &NewName) { |
224 | 82.0M | // Fast-path: LLVMContext can be set to strip out non-GlobalValue names |
225 | 82.0M | if (getContext().shouldDiscardValueNames() && !isa<GlobalValue>(this)43.5M ) |
226 | 42.9M | return; |
227 | 39.0M | |
228 | 39.0M | // Fast path for common IRBuilder case of setName("") when there is no name. |
229 | 39.0M | if (NewName.isTriviallyEmpty() && !hasName()20.6M ) |
230 | 19.7M | return; |
231 | 19.2M | |
232 | 19.2M | SmallString<256> NameData; |
233 | 19.2M | StringRef NameRef = NewName.toStringRef(NameData); |
234 | 19.2M | assert(NameRef.find_first_of(0) == StringRef::npos && |
235 | 19.2M | "Null bytes are not allowed in names"); |
236 | 19.2M | |
237 | 19.2M | // Name isn't changing? |
238 | 19.2M | if (getName() == NameRef) |
239 | 1.50M | return; |
240 | 17.7M | |
241 | 17.7M | // Cap the size of non-GlobalValue names. |
242 | 17.7M | if (NameRef.size() > NonGlobalValueMaxNameSize && !isa<GlobalValue>(this)19 ) |
243 | 13 | NameRef = |
244 | 13 | NameRef.substr(0, std::max(1u, (unsigned)NonGlobalValueMaxNameSize)); |
245 | 17.7M | |
246 | 17.7M | assert(!getType()->isVoidTy() && "Cannot assign a name to void values!"); |
247 | 17.7M | |
248 | 17.7M | // Get the symbol table to update for this object. |
249 | 17.7M | ValueSymbolTable *ST; |
250 | 17.7M | if (getSymTab(this, ST)) |
251 | 1.31k | return; // Cannot set a name on this value (e.g. constant). |
252 | 17.7M | |
253 | 17.7M | if (!ST) { // No symbol table to update? Just do the change. |
254 | 2.51M | if (NameRef.empty()) { |
255 | 0 | // Free the name for this value. |
256 | 0 | destroyValueName(); |
257 | 0 | return; |
258 | 0 | } |
259 | 2.51M | |
260 | 2.51M | // NOTE: Could optimize for the case the name is shrinking to not deallocate |
261 | 2.51M | // then reallocated. |
262 | 2.51M | destroyValueName(); |
263 | 2.51M | |
264 | 2.51M | // Create the new name. |
265 | 2.51M | setValueName(ValueName::Create(NameRef)); |
266 | 2.51M | getValueName()->setValue(this); |
267 | 2.51M | return; |
268 | 2.51M | } |
269 | 15.2M | |
270 | 15.2M | // NOTE: Could optimize for the case the name is shrinking to not deallocate |
271 | 15.2M | // then reallocated. |
272 | 15.2M | if (hasName()) { |
273 | 893k | // Remove old name. |
274 | 893k | ST->removeValueName(getValueName()); |
275 | 893k | destroyValueName(); |
276 | 893k | |
277 | 893k | if (NameRef.empty()) |
278 | 876k | return; |
279 | 14.3M | } |
280 | 14.3M | |
281 | 14.3M | // Name is changing to something new. |
282 | 14.3M | setValueName(ST->createValueName(NameRef, this)); |
283 | 14.3M | } |
284 | | |
285 | 82.0M | void Value::setName(const Twine &NewName) { |
286 | 82.0M | setNameImpl(NewName); |
287 | 82.0M | if (Function *F = dyn_cast<Function>(this)) |
288 | 3.31M | F->recalculateIntrinsicID(); |
289 | 82.0M | } |
290 | | |
291 | 2.49M | void Value::takeName(Value *V) { |
292 | 2.49M | ValueSymbolTable *ST = nullptr; |
293 | 2.49M | // If this value has a name, drop it. |
294 | 2.49M | if (hasName()) { |
295 | 30.4k | // Get the symtab this is in. |
296 | 30.4k | if (getSymTab(this, ST)) { |
297 | 0 | // We can't set a name on this value, but we need to clear V's name if |
298 | 0 | // it has one. |
299 | 0 | if (V->hasName()) V->setName(""); |
300 | 0 | return; // Cannot set a name on this value (e.g. constant). |
301 | 0 | } |
302 | 30.4k | |
303 | 30.4k | // Remove old name. |
304 | 30.4k | if (ST) |
305 | 6.50k | ST->removeValueName(getValueName()); |
306 | 30.4k | destroyValueName(); |
307 | 30.4k | } |
308 | 2.49M | |
309 | 2.49M | // Now we know that this has no name. |
310 | 2.49M | |
311 | 2.49M | // If V has no name either, we're done. |
312 | 2.49M | if (!V->hasName()) return2.30M ; |
313 | 196k | |
314 | 196k | // Get this's symtab if we didn't before. |
315 | 196k | if (!ST) { |
316 | 190k | if (getSymTab(this, ST)) { |
317 | 4 | // Clear V's name. |
318 | 4 | V->setName(""); |
319 | 4 | return; // Cannot set a name on this value (e.g. constant). |
320 | 4 | } |
321 | 196k | } |
322 | 196k | |
323 | 196k | // Get V's ST, this should always succed, because V has a name. |
324 | 196k | ValueSymbolTable *VST; |
325 | 196k | bool Failure = getSymTab(V, VST); |
326 | 196k | assert(!Failure && "V has a name, so it should have a ST!"); (void)Failure; |
327 | 196k | |
328 | 196k | // If these values are both in the same symtab, we can do this very fast. |
329 | 196k | // This works even if both values have no symtab yet. |
330 | 196k | if (ST == VST) { |
331 | 40.5k | // Take the name! |
332 | 40.5k | setValueName(V->getValueName()); |
333 | 40.5k | V->setValueName(nullptr); |
334 | 40.5k | getValueName()->setValue(this); |
335 | 40.5k | return; |
336 | 40.5k | } |
337 | 156k | |
338 | 156k | // Otherwise, things are slightly more complex. Remove V's name from VST and |
339 | 156k | // then reinsert it into ST. |
340 | 156k | |
341 | 156k | if (VST) |
342 | 152k | VST->removeValueName(V->getValueName()); |
343 | 156k | setValueName(V->getValueName()); |
344 | 156k | V->setValueName(nullptr); |
345 | 156k | getValueName()->setValue(this); |
346 | 156k | |
347 | 156k | if (ST) |
348 | 5.00k | ST->reinsertValue(this); |
349 | 156k | } |
350 | | |
351 | 0 | void Value::assertModuleIsMaterializedImpl() const { |
352 | | #ifndef NDEBUG |
353 | | const GlobalValue *GV = dyn_cast<GlobalValue>(this); |
354 | | if (!GV) |
355 | | return; |
356 | | const Module *M = GV->getParent(); |
357 | | if (!M) |
358 | | return; |
359 | | assert(M->isMaterialized()); |
360 | | #endif |
361 | | } |
362 | | |
363 | | #ifndef NDEBUG |
364 | | static bool contains(SmallPtrSetImpl<ConstantExpr *> &Cache, ConstantExpr *Expr, |
365 | | Constant *C) { |
366 | | if (!Cache.insert(Expr).second) |
367 | | return false; |
368 | | |
369 | | for (auto &O : Expr->operands()) { |
370 | | if (O == C) |
371 | | return true; |
372 | | auto *CE = dyn_cast<ConstantExpr>(O); |
373 | | if (!CE) |
374 | | continue; |
375 | | if (contains(Cache, CE, C)) |
376 | | return true; |
377 | | } |
378 | | return false; |
379 | | } |
380 | | |
381 | | static bool contains(Value *Expr, Value *V) { |
382 | | if (Expr == V) |
383 | | return true; |
384 | | |
385 | | auto *C = dyn_cast<Constant>(V); |
386 | | if (!C) |
387 | | return false; |
388 | | |
389 | | auto *CE = dyn_cast<ConstantExpr>(Expr); |
390 | | if (!CE) |
391 | | return false; |
392 | | |
393 | | SmallPtrSet<ConstantExpr *, 4> Cache; |
394 | | return contains(Cache, CE, C); |
395 | | } |
396 | | #endif // NDEBUG |
397 | | |
398 | 18.0M | void Value::doRAUW(Value *New, ReplaceMetadataUses ReplaceMetaUses) { |
399 | 18.0M | assert(New && "Value::replaceAllUsesWith(<null>) is invalid!"); |
400 | 18.0M | assert(!contains(New, this) && |
401 | 18.0M | "this->replaceAllUsesWith(expr(this)) is NOT valid!"); |
402 | 18.0M | assert(New->getType() == getType() && |
403 | 18.0M | "replaceAllUses of value with new value of different type!"); |
404 | 18.0M | |
405 | 18.0M | // Notify all ValueHandles (if present) that this value is going away. |
406 | 18.0M | if (HasValueHandle) |
407 | 859k | ValueHandleBase::ValueIsRAUWd(this, New); |
408 | 18.0M | if (ReplaceMetaUses == ReplaceMetadataUses::Yes18.0M && isUsedByMetadata()) |
409 | 9.89k | ValueAsMetadata::handleRAUW(this, New); |
410 | 18.0M | |
411 | 35.0M | while (!materialized_use_empty()) { |
412 | 16.9M | Use &U = *UseList; |
413 | 16.9M | // Must handle Constants specially, we cannot call replaceUsesOfWith on a |
414 | 16.9M | // constant because they are uniqued. |
415 | 16.9M | if (auto *C = dyn_cast<Constant>(U.getUser())) { |
416 | 29.5k | if (!isa<GlobalValue>(C)) { |
417 | 29.3k | C->handleOperandChange(this, New); |
418 | 29.3k | continue; |
419 | 29.3k | } |
420 | 16.9M | } |
421 | 16.9M | |
422 | 16.9M | U.set(New); |
423 | 16.9M | } |
424 | 18.0M | |
425 | 18.0M | if (BasicBlock *BB = dyn_cast<BasicBlock>(this)) |
426 | 2.94M | BB->replaceSuccessorsPhiUsesWith(cast<BasicBlock>(New)); |
427 | 18.0M | } |
428 | | |
429 | 18.0M | void Value::replaceAllUsesWith(Value *New) { |
430 | 18.0M | doRAUW(New, ReplaceMetadataUses::Yes); |
431 | 18.0M | } |
432 | | |
433 | 0 | void Value::replaceNonMetadataUsesWith(Value *New) { |
434 | 0 | doRAUW(New, ReplaceMetadataUses::No); |
435 | 0 | } |
436 | | |
437 | | // Like replaceAllUsesWith except it does not handle constants or basic blocks. |
438 | | // This routine leaves uses within BB. |
439 | 293 | void Value::replaceUsesOutsideBlock(Value *New, BasicBlock *BB) { |
440 | 293 | assert(New && "Value::replaceUsesOutsideBlock(<null>, BB) is invalid!"); |
441 | 293 | assert(!contains(New, this) && |
442 | 293 | "this->replaceUsesOutsideBlock(expr(this), BB) is NOT valid!"); |
443 | 293 | assert(New->getType() == getType() && |
444 | 293 | "replaceUses of value with new value of different type!"); |
445 | 293 | assert(BB && "Basic block that may contain a use of 'New' must be defined\n"); |
446 | 293 | |
447 | 293 | use_iterator UI = use_begin(), E = use_end(); |
448 | 1.00k | for (; UI != E;) { |
449 | 710 | Use &U = *UI; |
450 | 710 | ++UI; |
451 | 710 | auto *Usr = dyn_cast<Instruction>(U.getUser()); |
452 | 710 | if (Usr && Usr->getParent() == BB) |
453 | 295 | continue; |
454 | 415 | U.set(New); |
455 | 415 | } |
456 | 293 | } |
457 | | |
458 | | namespace { |
459 | | // Various metrics for how much to strip off of pointers. |
460 | | enum PointerStripKind { |
461 | | PSK_ZeroIndices, |
462 | | PSK_ZeroIndicesAndAliases, |
463 | | PSK_ZeroIndicesAndAliasesSameRepresentation, |
464 | | PSK_ZeroIndicesAndAliasesAndInvariantGroups, |
465 | | PSK_InBoundsConstantIndices, |
466 | | PSK_InBounds |
467 | | }; |
468 | | |
469 | | template <PointerStripKind StripKind> |
470 | 383M | static const Value *stripPointerCastsAndOffsets(const Value *V) { |
471 | 383M | if (!V->getType()->isPointerTy()) |
472 | 4.73M | return V; |
473 | 379M | |
474 | 379M | // Even though we don't look through PHI nodes, we could be called on an |
475 | 379M | // instruction in an unreachable block, which may be on a cycle. |
476 | 379M | SmallPtrSet<const Value *, 4> Visited; |
477 | 379M | |
478 | 379M | Visited.insert(V); |
479 | 447M | do { |
480 | 447M | if (auto *GEP = dyn_cast<GEPOperator>(V)) { |
481 | 149M | switch (StripKind) { |
482 | 149M | case PSK_ZeroIndicesAndAliases: |
483 | 56.9M | case PSK_ZeroIndicesAndAliasesSameRepresentation: |
484 | 149M | case PSK_ZeroIndicesAndAliasesAndInvariantGroups: |
485 | 149M | case PSK_ZeroIndices: |
486 | 149M | if (!GEP->hasAllZeroIndices()) |
487 | 137M | return V; |
488 | 12.2M | break; |
489 | 12.2M | case PSK_InBoundsConstantIndices: |
490 | 16 | if (!GEP->hasAllConstantIndices()) |
491 | 0 | return V; |
492 | 16 | LLVM_FALLTHROUGH; |
493 | 57.7k | case PSK_InBounds: |
494 | 57.7k | if (!GEP->isInBounds()) |
495 | 968 | return V; |
496 | 56.7k | break; |
497 | 12.3M | } |
498 | 12.3M | V = GEP->getPointerOperand(); |
499 | 297M | } else if (Operator::getOpcode(V) == Instruction::BitCast) { |
500 | 55.7M | V = cast<Operator>(V)->getOperand(0); |
501 | 241M | } else if (StripKind != PSK_ZeroIndicesAndAliasesSameRepresentation && |
502 | 241M | Operator::getOpcode(V) == Instruction::AddrSpaceCast238M ) { |
503 | 2.04k | // TODO: If we know an address space cast will not change the |
504 | 2.04k | // representation we could look through it here as well. |
505 | 2.04k | V = cast<Operator>(V)->getOperand(0); |
506 | 241M | } else if (auto *GA = dyn_cast<GlobalAlias>(V)) { |
507 | 1.90k | if (StripKind == PSK_ZeroIndices || GA->isInterposable()842 ) |
508 | 1.18k | return V; |
509 | 724 | V = GA->getAliasee(); |
510 | 241M | } else { |
511 | 241M | if (const auto *Call = dyn_cast<CallBase>(V)) { |
512 | 12.2M | if (const Value *RV = Call->getReturnedArgOperand()) { |
513 | 2.49k | V = RV; |
514 | 2.49k | continue; |
515 | 2.49k | } |
516 | 12.2M | // The result of launder.invariant.group must alias it's argument, |
517 | 12.2M | // but it can't be marked with returned attribute, that's why it needs |
518 | 12.2M | // special case. |
519 | 12.2M | if (StripKind == PSK_ZeroIndicesAndAliasesAndInvariantGroups && |
520 | 12.2M | (5.25M Call->getIntrinsicID() == Intrinsic::launder_invariant_group5.25M || |
521 | 5.25M | Call->getIntrinsicID() == Intrinsic::strip_invariant_group5.25M )) { |
522 | 103 | V = Call->getArgOperand(0); |
523 | 103 | continue; |
524 | 103 | } |
525 | 241M | } |
526 | 241M | return V; |
527 | 241M | } |
528 | 68.0M | assert(V->getType()->isPointerTy() && "Unexpected operand type!"); |
529 | 68.0M | } while (Visited.insert(V).second); |
530 | 379M | |
531 | 379M | return V31 ; |
532 | 379M | } Value.cpp:llvm::Value const* (anonymous namespace)::stripPointerCastsAndOffsets<((anonymous namespace)::PointerStripKind)1>(llvm::Value const*) Line | Count | Source | 470 | 163M | static const Value *stripPointerCastsAndOffsets(const Value *V) { | 471 | 163M | if (!V->getType()->isPointerTy()) | 472 | 4.73M | return V; | 473 | 159M | | 474 | 159M | // Even though we don't look through PHI nodes, we could be called on an | 475 | 159M | // instruction in an unreachable block, which may be on a cycle. | 476 | 159M | SmallPtrSet<const Value *, 4> Visited; | 477 | 159M | | 478 | 159M | Visited.insert(V); | 479 | 182M | do { | 480 | 182M | if (auto *GEP = dyn_cast<GEPOperator>(V)) { | 481 | 56.8M | switch (StripKind) { | 482 | 56.8M | case PSK_ZeroIndicesAndAliases: | 483 | 56.8M | case PSK_ZeroIndicesAndAliasesSameRepresentation: | 484 | 56.8M | case PSK_ZeroIndicesAndAliasesAndInvariantGroups: | 485 | 56.8M | case PSK_ZeroIndices: | 486 | 56.8M | if (!GEP->hasAllZeroIndices()) | 487 | 49.9M | return V; | 488 | 6.92M | break; | 489 | 6.92M | case PSK_InBoundsConstantIndices: | 490 | 0 | if (!GEP->hasAllConstantIndices()) | 491 | 0 | return V; | 492 | 0 | LLVM_FALLTHROUGH; | 493 | 0 | case PSK_InBounds: | 494 | 0 | if (!GEP->isInBounds()) | 495 | 0 | return V; | 496 | 0 | break; | 497 | 6.92M | } | 498 | 6.92M | V = GEP->getPointerOperand(); | 499 | 125M | } else if (Operator::getOpcode(V) == Instruction::BitCast) { | 500 | 16.1M | V = cast<Operator>(V)->getOperand(0); | 501 | 109M | } else if (StripKind != PSK_ZeroIndicesAndAliasesSameRepresentation && | 502 | 109M | Operator::getOpcode(V) == Instruction::AddrSpaceCast) { | 503 | 1.69k | // TODO: If we know an address space cast will not change the | 504 | 1.69k | // representation we could look through it here as well. | 505 | 1.69k | V = cast<Operator>(V)->getOperand(0); | 506 | 109M | } else if (auto *GA = dyn_cast<GlobalAlias>(V)) { | 507 | 620 | if (StripKind == PSK_ZeroIndices || GA->isInterposable()) | 508 | 117 | return V; | 509 | 503 | V = GA->getAliasee(); | 510 | 109M | } else { | 511 | 109M | if (const auto *Call = dyn_cast<CallBase>(V)) { | 512 | 6.01M | if (const Value *RV = Call->getReturnedArgOperand()) { | 513 | 1.65k | V = RV; | 514 | 1.65k | continue; | 515 | 1.65k | } | 516 | 6.01M | // The result of launder.invariant.group must alias it's argument, | 517 | 6.01M | // but it can't be marked with returned attribute, that's why it needs | 518 | 6.01M | // special case. | 519 | 6.01M | if (StripKind == PSK_ZeroIndicesAndAliasesAndInvariantGroups && | 520 | 6.01M | (0 Call->getIntrinsicID() == Intrinsic::launder_invariant_group0 || | 521 | 0 | Call->getIntrinsicID() == Intrinsic::strip_invariant_group)) { | 522 | 0 | V = Call->getArgOperand(0); | 523 | 0 | continue; | 524 | 0 | } | 525 | 109M | } | 526 | 109M | return V; | 527 | 109M | } | 528 | 23.1M | assert(V->getType()->isPointerTy() && "Unexpected operand type!"); | 529 | 23.1M | } while (Visited.insert(V).second); | 530 | 159M | | 531 | 159M | return V11 ; | 532 | 159M | } |
Value.cpp:llvm::Value const* (anonymous namespace)::stripPointerCastsAndOffsets<((anonymous namespace)::PointerStripKind)2>(llvm::Value const*) Line | Count | Source | 470 | 3.44M | static const Value *stripPointerCastsAndOffsets(const Value *V) { | 471 | 3.44M | if (!V->getType()->isPointerTy()) | 472 | 0 | return V; | 473 | 3.44M | | 474 | 3.44M | // Even though we don't look through PHI nodes, we could be called on an | 475 | 3.44M | // instruction in an unreachable block, which may be on a cycle. | 476 | 3.44M | SmallPtrSet<const Value *, 4> Visited; | 477 | 3.44M | | 478 | 3.44M | Visited.insert(V); | 479 | 3.66M | do { | 480 | 3.66M | if (auto *GEP = dyn_cast<GEPOperator>(V)) { | 481 | 105k | switch (StripKind) { | 482 | 105k | case PSK_ZeroIndicesAndAliases: | 483 | 105k | case PSK_ZeroIndicesAndAliasesSameRepresentation: | 484 | 105k | case PSK_ZeroIndicesAndAliasesAndInvariantGroups: | 485 | 105k | case PSK_ZeroIndices: | 486 | 105k | if (!GEP->hasAllZeroIndices()) | 487 | 39.4k | return V; | 488 | 65.9k | break; | 489 | 65.9k | case PSK_InBoundsConstantIndices: | 490 | 0 | if (!GEP->hasAllConstantIndices()) | 491 | 0 | return V; | 492 | 0 | LLVM_FALLTHROUGH; | 493 | 0 | case PSK_InBounds: | 494 | 0 | if (!GEP->isInBounds()) | 495 | 0 | return V; | 496 | 0 | break; | 497 | 65.9k | } | 498 | 65.9k | V = GEP->getPointerOperand(); | 499 | 3.55M | } else if (Operator::getOpcode(V) == Instruction::BitCast) { | 500 | 151k | V = cast<Operator>(V)->getOperand(0); | 501 | 3.40M | } else if (StripKind != PSK_ZeroIndicesAndAliasesSameRepresentation && | 502 | 3.40M | Operator::getOpcode(V) == Instruction::AddrSpaceCast0 ) { | 503 | 0 | // TODO: If we know an address space cast will not change the | 504 | 0 | // representation we could look through it here as well. | 505 | 0 | V = cast<Operator>(V)->getOperand(0); | 506 | 3.40M | } else if (auto *GA = dyn_cast<GlobalAlias>(V)) { | 507 | 0 | if (StripKind == PSK_ZeroIndices || GA->isInterposable()) | 508 | 0 | return V; | 509 | 0 | V = GA->getAliasee(); | 510 | 3.40M | } else { | 511 | 3.40M | if (const auto *Call = dyn_cast<CallBase>(V)) { | 512 | 914k | if (const Value *RV = Call->getReturnedArgOperand()) { | 513 | 206 | V = RV; | 514 | 206 | continue; | 515 | 206 | } | 516 | 914k | // The result of launder.invariant.group must alias it's argument, | 517 | 914k | // but it can't be marked with returned attribute, that's why it needs | 518 | 914k | // special case. | 519 | 914k | if (StripKind == PSK_ZeroIndicesAndAliasesAndInvariantGroups && | 520 | 914k | (0 Call->getIntrinsicID() == Intrinsic::launder_invariant_group0 || | 521 | 0 | Call->getIntrinsicID() == Intrinsic::strip_invariant_group)) { | 522 | 0 | V = Call->getArgOperand(0); | 523 | 0 | continue; | 524 | 0 | } | 525 | 3.40M | } | 526 | 3.40M | return V; | 527 | 3.40M | } | 528 | 217k | assert(V->getType()->isPointerTy() && "Unexpected operand type!"); | 529 | 218k | } while (Visited.insert(V).second); | 530 | 3.44M | | 531 | 3.44M | return V0 ; | 532 | 3.44M | } |
Value.cpp:llvm::Value const* (anonymous namespace)::stripPointerCastsAndOffsets<((anonymous namespace)::PointerStripKind)0>(llvm::Value const*) Line | Count | Source | 470 | 13.2M | static const Value *stripPointerCastsAndOffsets(const Value *V) { | 471 | 13.2M | if (!V->getType()->isPointerTy()) | 472 | 0 | return V; | 473 | 13.2M | | 474 | 13.2M | // Even though we don't look through PHI nodes, we could be called on an | 475 | 13.2M | // instruction in an unreachable block, which may be on a cycle. | 476 | 13.2M | SmallPtrSet<const Value *, 4> Visited; | 477 | 13.2M | | 478 | 13.2M | Visited.insert(V); | 479 | 13.7M | do { | 480 | 13.7M | if (auto *GEP = dyn_cast<GEPOperator>(V)) { | 481 | 35.3k | switch (StripKind) { | 482 | 35.3k | case PSK_ZeroIndicesAndAliases: | 483 | 0 | case PSK_ZeroIndicesAndAliasesSameRepresentation: | 484 | 0 | case PSK_ZeroIndicesAndAliasesAndInvariantGroups: | 485 | 35.3k | case PSK_ZeroIndices: | 486 | 35.3k | if (!GEP->hasAllZeroIndices()) | 487 | 11.5k | return V; | 488 | 23.8k | break; | 489 | 23.8k | case PSK_InBoundsConstantIndices: | 490 | 0 | if (!GEP->hasAllConstantIndices()) | 491 | 0 | return V; | 492 | 0 | LLVM_FALLTHROUGH; | 493 | 0 | case PSK_InBounds: | 494 | 0 | if (!GEP->isInBounds()) | 495 | 0 | return V; | 496 | 0 | break; | 497 | 23.8k | } | 498 | 23.8k | V = GEP->getPointerOperand(); | 499 | 13.7M | } else if (Operator::getOpcode(V) == Instruction::BitCast) { | 500 | 456k | V = cast<Operator>(V)->getOperand(0); | 501 | 13.2M | } else if (StripKind != PSK_ZeroIndicesAndAliasesSameRepresentation && | 502 | 13.2M | Operator::getOpcode(V) == Instruction::AddrSpaceCast) { | 503 | 36 | // TODO: If we know an address space cast will not change the | 504 | 36 | // representation we could look through it here as well. | 505 | 36 | V = cast<Operator>(V)->getOperand(0); | 506 | 13.2M | } else if (auto *GA = dyn_cast<GlobalAlias>(V)) { | 507 | 1.06k | if (StripKind == PSK_ZeroIndices || GA->isInterposable()0 ) | 508 | 1.06k | return V; | 509 | 0 | V = GA->getAliasee(); | 510 | 13.2M | } else { | 511 | 13.2M | if (const auto *Call = dyn_cast<CallBase>(V)) { | 512 | 38 | if (const Value *RV = Call->getReturnedArgOperand()) { | 513 | 0 | V = RV; | 514 | 0 | continue; | 515 | 0 | } | 516 | 38 | // The result of launder.invariant.group must alias it's argument, | 517 | 38 | // but it can't be marked with returned attribute, that's why it needs | 518 | 38 | // special case. | 519 | 38 | if (StripKind == PSK_ZeroIndicesAndAliasesAndInvariantGroups && | 520 | 38 | (0 Call->getIntrinsicID() == Intrinsic::launder_invariant_group0 || | 521 | 0 | Call->getIntrinsicID() == Intrinsic::strip_invariant_group)) { | 522 | 0 | V = Call->getArgOperand(0); | 523 | 0 | continue; | 524 | 0 | } | 525 | 13.2M | } | 526 | 13.2M | return V; | 527 | 13.2M | } | 528 | 480k | assert(V->getType()->isPointerTy() && "Unexpected operand type!"); | 529 | 480k | } while (Visited.insert(V).second); | 530 | 13.2M | | 531 | 13.2M | return V0 ; | 532 | 13.2M | } |
Value.cpp:llvm::Value const* (anonymous namespace)::stripPointerCastsAndOffsets<((anonymous namespace)::PointerStripKind)4>(llvm::Value const*) Line | Count | Source | 470 | 1.08k | static const Value *stripPointerCastsAndOffsets(const Value *V) { | 471 | 1.08k | if (!V->getType()->isPointerTy()) | 472 | 71 | return V; | 473 | 1.00k | | 474 | 1.00k | // Even though we don't look through PHI nodes, we could be called on an | 475 | 1.00k | // instruction in an unreachable block, which may be on a cycle. | 476 | 1.00k | SmallPtrSet<const Value *, 4> Visited; | 477 | 1.00k | | 478 | 1.00k | Visited.insert(V); | 479 | 1.08k | do { | 480 | 1.08k | if (auto *GEP = dyn_cast<GEPOperator>(V)) { | 481 | 16 | switch (StripKind) { | 482 | 16 | case PSK_ZeroIndicesAndAliases: | 483 | 0 | case PSK_ZeroIndicesAndAliasesSameRepresentation: | 484 | 0 | case PSK_ZeroIndicesAndAliasesAndInvariantGroups: | 485 | 0 | case PSK_ZeroIndices: | 486 | 0 | if (!GEP->hasAllZeroIndices()) | 487 | 0 | return V; | 488 | 0 | break; | 489 | 16 | case PSK_InBoundsConstantIndices: | 490 | 16 | if (!GEP->hasAllConstantIndices()) | 491 | 0 | return V; | 492 | 16 | LLVM_FALLTHROUGH; | 493 | 16 | case PSK_InBounds: | 494 | 16 | if (!GEP->isInBounds()) | 495 | 0 | return V; | 496 | 16 | break; | 497 | 16 | } | 498 | 16 | V = GEP->getPointerOperand(); | 499 | 1.06k | } else if (Operator::getOpcode(V) == Instruction::BitCast) { | 500 | 58 | V = cast<Operator>(V)->getOperand(0); | 501 | 1.00k | } else if (StripKind != PSK_ZeroIndicesAndAliasesSameRepresentation && | 502 | 1.00k | Operator::getOpcode(V) == Instruction::AddrSpaceCast) { | 503 | 0 | // TODO: If we know an address space cast will not change the | 504 | 0 | // representation we could look through it here as well. | 505 | 0 | V = cast<Operator>(V)->getOperand(0); | 506 | 1.00k | } else if (auto *GA = dyn_cast<GlobalAlias>(V)) { | 507 | 0 | if (StripKind == PSK_ZeroIndices || GA->isInterposable()) | 508 | 0 | return V; | 509 | 0 | V = GA->getAliasee(); | 510 | 1.00k | } else { | 511 | 1.00k | if (const auto *Call = dyn_cast<CallBase>(V)) { | 512 | 0 | if (const Value *RV = Call->getReturnedArgOperand()) { | 513 | 0 | V = RV; | 514 | 0 | continue; | 515 | 0 | } | 516 | 0 | // The result of launder.invariant.group must alias it's argument, | 517 | 0 | // but it can't be marked with returned attribute, that's why it needs | 518 | 0 | // special case. | 519 | 0 | if (StripKind == PSK_ZeroIndicesAndAliasesAndInvariantGroups && | 520 | 0 | (Call->getIntrinsicID() == Intrinsic::launder_invariant_group || | 521 | 0 | Call->getIntrinsicID() == Intrinsic::strip_invariant_group)) { | 522 | 0 | V = Call->getArgOperand(0); | 523 | 0 | continue; | 524 | 0 | } | 525 | 1.00k | } | 526 | 1.00k | return V; | 527 | 1.00k | } | 528 | 74 | assert(V->getType()->isPointerTy() && "Unexpected operand type!"); | 529 | 74 | } while (Visited.insert(V).second); | 530 | 1.00k | | 531 | 1.00k | return V0 ; | 532 | 1.00k | } |
Value.cpp:llvm::Value const* (anonymous namespace)::stripPointerCastsAndOffsets<((anonymous namespace)::PointerStripKind)3>(llvm::Value const*) Line | Count | Source | 470 | 202M | static const Value *stripPointerCastsAndOffsets(const Value *V) { | 471 | 202M | if (!V->getType()->isPointerTy()) | 472 | 77 | return V; | 473 | 202M | | 474 | 202M | // Even though we don't look through PHI nodes, we could be called on an | 475 | 202M | // instruction in an unreachable block, which may be on a cycle. | 476 | 202M | SmallPtrSet<const Value *, 4> Visited; | 477 | 202M | | 478 | 202M | Visited.insert(V); | 479 | 246M | do { | 480 | 246M | if (auto *GEP = dyn_cast<GEPOperator>(V)) { | 481 | 92.4M | switch (StripKind) { | 482 | 92.4M | case PSK_ZeroIndicesAndAliases: | 483 | 0 | case PSK_ZeroIndicesAndAliasesSameRepresentation: | 484 | 92.4M | case PSK_ZeroIndicesAndAliasesAndInvariantGroups: | 485 | 92.4M | case PSK_ZeroIndices: | 486 | 92.4M | if (!GEP->hasAllZeroIndices()) | 487 | 87.1M | return V; | 488 | 5.26M | break; | 489 | 5.26M | case PSK_InBoundsConstantIndices: | 490 | 0 | if (!GEP->hasAllConstantIndices()) | 491 | 0 | return V; | 492 | 0 | LLVM_FALLTHROUGH; | 493 | 0 | case PSK_InBounds: | 494 | 0 | if (!GEP->isInBounds()) | 495 | 0 | return V; | 496 | 0 | break; | 497 | 5.26M | } | 498 | 5.26M | V = GEP->getPointerOperand(); | 499 | 154M | } else if (Operator::getOpcode(V) == Instruction::BitCast) { | 500 | 38.8M | V = cast<Operator>(V)->getOperand(0); | 501 | 115M | } else if (StripKind != PSK_ZeroIndicesAndAliasesSameRepresentation && | 502 | 115M | Operator::getOpcode(V) == Instruction::AddrSpaceCast) { | 503 | 258 | // TODO: If we know an address space cast will not change the | 504 | 258 | // representation we could look through it here as well. | 505 | 258 | V = cast<Operator>(V)->getOperand(0); | 506 | 115M | } else if (auto *GA = dyn_cast<GlobalAlias>(V)) { | 507 | 0 | if (StripKind == PSK_ZeroIndices || GA->isInterposable()) | 508 | 0 | return V; | 509 | 0 | V = GA->getAliasee(); | 510 | 115M | } else { | 511 | 115M | if (const auto *Call = dyn_cast<CallBase>(V)) { | 512 | 5.26M | if (const Value *RV = Call->getReturnedArgOperand()) { | 513 | 631 | V = RV; | 514 | 631 | continue; | 515 | 631 | } | 516 | 5.25M | // The result of launder.invariant.group must alias it's argument, | 517 | 5.25M | // but it can't be marked with returned attribute, that's why it needs | 518 | 5.25M | // special case. | 519 | 5.25M | if (StripKind == PSK_ZeroIndicesAndAliasesAndInvariantGroups && | 520 | 5.25M | (Call->getIntrinsicID() == Intrinsic::launder_invariant_group || | 521 | 5.25M | Call->getIntrinsicID() == Intrinsic::strip_invariant_group5.25M )) { | 522 | 103 | V = Call->getArgOperand(0); | 523 | 103 | continue; | 524 | 103 | } | 525 | 115M | } | 526 | 115M | return V; | 527 | 115M | } | 528 | 44.1M | assert(V->getType()->isPointerTy() && "Unexpected operand type!"); | 529 | 44.1M | } while (Visited.insert(V).second); | 530 | 202M | | 531 | 202M | return V18 ; | 532 | 202M | } |
Value.cpp:llvm::Value const* (anonymous namespace)::stripPointerCastsAndOffsets<((anonymous namespace)::PointerStripKind)5>(llvm::Value const*) Line | Count | Source | 470 | 551k | static const Value *stripPointerCastsAndOffsets(const Value *V) { | 471 | 551k | if (!V->getType()->isPointerTy()) | 472 | 0 | return V; | 473 | 551k | | 474 | 551k | // Even though we don't look through PHI nodes, we could be called on an | 475 | 551k | // instruction in an unreachable block, which may be on a cycle. | 476 | 551k | SmallPtrSet<const Value *, 4> Visited; | 477 | 551k | | 478 | 551k | Visited.insert(V); | 479 | 663k | do { | 480 | 663k | if (auto *GEP = dyn_cast<GEPOperator>(V)) { | 481 | 57.7k | switch (StripKind) { | 482 | 57.7k | case PSK_ZeroIndicesAndAliases: | 483 | 0 | case PSK_ZeroIndicesAndAliasesSameRepresentation: | 484 | 0 | case PSK_ZeroIndicesAndAliasesAndInvariantGroups: | 485 | 0 | case PSK_ZeroIndices: | 486 | 0 | if (!GEP->hasAllZeroIndices()) | 487 | 0 | return V; | 488 | 0 | break; | 489 | 0 | case PSK_InBoundsConstantIndices: | 490 | 0 | if (!GEP->hasAllConstantIndices()) | 491 | 0 | return V; | 492 | 0 | LLVM_FALLTHROUGH; | 493 | 57.7k | case PSK_InBounds: | 494 | 57.7k | if (!GEP->isInBounds()) | 495 | 968 | return V; | 496 | 56.7k | break; | 497 | 56.7k | } | 498 | 56.7k | V = GEP->getPointerOperand(); | 499 | 606k | } else if (Operator::getOpcode(V) == Instruction::BitCast) { | 500 | 55.4k | V = cast<Operator>(V)->getOperand(0); | 501 | 550k | } else if (StripKind != PSK_ZeroIndicesAndAliasesSameRepresentation && | 502 | 550k | Operator::getOpcode(V) == Instruction::AddrSpaceCast) { | 503 | 52 | // TODO: If we know an address space cast will not change the | 504 | 52 | // representation we could look through it here as well. | 505 | 52 | V = cast<Operator>(V)->getOperand(0); | 506 | 550k | } else if (auto *GA = dyn_cast<GlobalAlias>(V)) { | 507 | 222 | if (StripKind == PSK_ZeroIndices || GA->isInterposable()) | 508 | 1 | return V; | 509 | 221 | V = GA->getAliasee(); | 510 | 550k | } else { | 511 | 550k | if (const auto *Call = dyn_cast<CallBase>(V)) { | 512 | 47.4k | if (const Value *RV = Call->getReturnedArgOperand()) { | 513 | 1 | V = RV; | 514 | 1 | continue; | 515 | 1 | } | 516 | 47.4k | // The result of launder.invariant.group must alias it's argument, | 517 | 47.4k | // but it can't be marked with returned attribute, that's why it needs | 518 | 47.4k | // special case. | 519 | 47.4k | if (StripKind == PSK_ZeroIndicesAndAliasesAndInvariantGroups && | 520 | 47.4k | (0 Call->getIntrinsicID() == Intrinsic::launder_invariant_group0 || | 521 | 0 | Call->getIntrinsicID() == Intrinsic::strip_invariant_group)) { | 522 | 0 | V = Call->getArgOperand(0); | 523 | 0 | continue; | 524 | 0 | } | 525 | 550k | } | 526 | 550k | return V; | 527 | 550k | } | 528 | 112k | assert(V->getType()->isPointerTy() && "Unexpected operand type!"); | 529 | 112k | } while (Visited.insert(V).second); | 530 | 551k | | 531 | 551k | return V2 ; | 532 | 551k | } |
|
533 | | } // end anonymous namespace |
534 | | |
535 | 163M | const Value *Value::stripPointerCasts() const { |
536 | 163M | return stripPointerCastsAndOffsets<PSK_ZeroIndicesAndAliases>(this); |
537 | 163M | } |
538 | | |
539 | 3.44M | const Value *Value::stripPointerCastsSameRepresentation() const { |
540 | 3.44M | return stripPointerCastsAndOffsets< |
541 | 3.44M | PSK_ZeroIndicesAndAliasesSameRepresentation>(this); |
542 | 3.44M | } |
543 | | |
544 | 13.2M | const Value *Value::stripPointerCastsNoFollowAliases() const { |
545 | 13.2M | return stripPointerCastsAndOffsets<PSK_ZeroIndices>(this); |
546 | 13.2M | } |
547 | | |
548 | 1.08k | const Value *Value::stripInBoundsConstantOffsets() const { |
549 | 1.08k | return stripPointerCastsAndOffsets<PSK_InBoundsConstantIndices>(this); |
550 | 1.08k | } |
551 | | |
552 | 202M | const Value *Value::stripPointerCastsAndInvariantGroups() const { |
553 | 202M | return stripPointerCastsAndOffsets<PSK_ZeroIndicesAndAliasesAndInvariantGroups>( |
554 | 202M | this); |
555 | 202M | } |
556 | | |
557 | | const Value * |
558 | | Value::stripAndAccumulateConstantOffsets(const DataLayout &DL, APInt &Offset, |
559 | 36.3M | bool AllowNonInbounds) const { |
560 | 36.3M | if (!getType()->isPtrOrPtrVectorTy()) |
561 | 3 | return this; |
562 | 36.3M | |
563 | 36.3M | unsigned BitWidth = Offset.getBitWidth(); |
564 | 36.3M | assert(BitWidth == DL.getIndexTypeSizeInBits(getType()) && |
565 | 36.3M | "The offset bit width does not match the DL specification."); |
566 | 36.3M | |
567 | 36.3M | // Even though we don't look through PHI nodes, we could be called on an |
568 | 36.3M | // instruction in an unreachable block, which may be on a cycle. |
569 | 36.3M | SmallPtrSet<const Value *, 4> Visited; |
570 | 36.3M | Visited.insert(this); |
571 | 36.3M | const Value *V = this; |
572 | 42.7M | do { |
573 | 42.7M | if (auto *GEP = dyn_cast<GEPOperator>(V)) { |
574 | 6.64M | // If in-bounds was requested, we do not strip non-in-bounds GEPs. |
575 | 6.64M | if (!AllowNonInbounds && !GEP->isInBounds()5.26M ) |
576 | 376k | return V; |
577 | 6.26M | |
578 | 6.26M | // If one of the values we have visited is an addrspacecast, then |
579 | 6.26M | // the pointer type of this GEP may be different from the type |
580 | 6.26M | // of the Ptr parameter which was passed to this function. This |
581 | 6.26M | // means when we construct GEPOffset, we need to use the size |
582 | 6.26M | // of GEP's pointer type rather than the size of the original |
583 | 6.26M | // pointer type. |
584 | 6.26M | APInt GEPOffset(DL.getIndexTypeSizeInBits(V->getType()), 0); |
585 | 6.26M | if (!GEP->accumulateConstantOffset(DL, GEPOffset)) |
586 | 969k | return V; |
587 | 5.29M | |
588 | 5.29M | // Stop traversal if the pointer offset wouldn't fit in the bit-width |
589 | 5.29M | // provided by the Offset argument. This can happen due to AddrSpaceCast |
590 | 5.29M | // stripping. |
591 | 5.29M | if (GEPOffset.getMinSignedBits() > BitWidth) |
592 | 0 | return V; |
593 | 5.29M | |
594 | 5.29M | Offset += GEPOffset.sextOrTrunc(BitWidth); |
595 | 5.29M | V = GEP->getPointerOperand(); |
596 | 36.0M | } else if (Operator::getOpcode(V) == Instruction::BitCast || |
597 | 36.0M | Operator::getOpcode(V) == Instruction::AddrSpaceCast35.0M ) { |
598 | 1.08M | V = cast<Operator>(V)->getOperand(0); |
599 | 35.0M | } else if (auto *GA = dyn_cast<GlobalAlias>(V)) { |
600 | 27 | if (!GA->isInterposable()) |
601 | 27 | V = GA->getAliasee(); |
602 | 35.0M | } else if (const auto *Call = dyn_cast<CallBase>(V)) { |
603 | 4.04M | if (const Value *RV = Call->getReturnedArgOperand()) |
604 | 610 | V = RV; |
605 | 4.04M | } |
606 | 42.7M | assert(V->getType()->isPtrOrPtrVectorTy() && "Unexpected operand type!"); |
607 | 41.3M | } while (Visited.insert(V).second); |
608 | 36.3M | |
609 | 36.3M | return V34.9M ; |
610 | 36.3M | } |
611 | | |
612 | 551k | const Value *Value::stripInBoundsOffsets() const { |
613 | 551k | return stripPointerCastsAndOffsets<PSK_InBounds>(this); |
614 | 551k | } |
615 | | |
616 | | uint64_t Value::getPointerDereferenceableBytes(const DataLayout &DL, |
617 | 6.66M | bool &CanBeNull) const { |
618 | 6.66M | assert(getType()->isPointerTy() && "must be pointer"); |
619 | 6.66M | |
620 | 6.66M | uint64_t DerefBytes = 0; |
621 | 6.66M | CanBeNull = false; |
622 | 6.66M | if (const Argument *A = dyn_cast<Argument>(this)) { |
623 | 1.23M | DerefBytes = A->getDereferenceableBytes(); |
624 | 1.23M | if (DerefBytes == 0 && (1.00M A->hasByValAttr()1.00M || A->hasStructRetAttr()1.00M )) { |
625 | 2.92k | Type *PT = cast<PointerType>(A->getType())->getElementType(); |
626 | 2.92k | if (PT->isSized()) |
627 | 2.92k | DerefBytes = DL.getTypeStoreSize(PT); |
628 | 2.92k | } |
629 | 1.23M | if (DerefBytes == 0) { |
630 | 1.00M | DerefBytes = A->getDereferenceableOrNullBytes(); |
631 | 1.00M | CanBeNull = true; |
632 | 1.00M | } |
633 | 5.42M | } else if (const auto *Call = dyn_cast<CallBase>(this)) { |
634 | 186k | DerefBytes = Call->getDereferenceableBytes(AttributeList::ReturnIndex); |
635 | 186k | if (DerefBytes == 0) { |
636 | 154k | DerefBytes = |
637 | 154k | Call->getDereferenceableOrNullBytes(AttributeList::ReturnIndex); |
638 | 154k | CanBeNull = true; |
639 | 154k | } |
640 | 5.23M | } else if (const LoadInst *LI = dyn_cast<LoadInst>(this)) { |
641 | 379k | if (MDNode *MD = LI->getMetadata(LLVMContext::MD_dereferenceable)) { |
642 | 18 | ConstantInt *CI = mdconst::extract<ConstantInt>(MD->getOperand(0)); |
643 | 18 | DerefBytes = CI->getLimitedValue(); |
644 | 18 | } |
645 | 379k | if (DerefBytes == 0) { |
646 | 379k | if (MDNode *MD = |
647 | 12 | LI->getMetadata(LLVMContext::MD_dereferenceable_or_null)) { |
648 | 12 | ConstantInt *CI = mdconst::extract<ConstantInt>(MD->getOperand(0)); |
649 | 12 | DerefBytes = CI->getLimitedValue(); |
650 | 12 | } |
651 | 379k | CanBeNull = true; |
652 | 379k | } |
653 | 4.85M | } else if (auto *IP = dyn_cast<IntToPtrInst>(this)) { |
654 | 54.8k | if (MDNode *MD = IP->getMetadata(LLVMContext::MD_dereferenceable)) { |
655 | 2 | ConstantInt *CI = mdconst::extract<ConstantInt>(MD->getOperand(0)); |
656 | 2 | DerefBytes = CI->getLimitedValue(); |
657 | 2 | } |
658 | 54.8k | if (DerefBytes == 0) { |
659 | 54.8k | if (MDNode *MD = |
660 | 0 | IP->getMetadata(LLVMContext::MD_dereferenceable_or_null)) { |
661 | 0 | ConstantInt *CI = mdconst::extract<ConstantInt>(MD->getOperand(0)); |
662 | 0 | DerefBytes = CI->getLimitedValue(); |
663 | 0 | } |
664 | 54.8k | CanBeNull = true; |
665 | 54.8k | } |
666 | 4.80M | } else if (auto *AI = dyn_cast<AllocaInst>(this)) { |
667 | 703k | if (!AI->isArrayAllocation()) { |
668 | 703k | DerefBytes = DL.getTypeStoreSize(AI->getAllocatedType()); |
669 | 703k | CanBeNull = false; |
670 | 703k | } |
671 | 4.09M | } else if (auto *GV = dyn_cast<GlobalVariable>(this)) { |
672 | 608k | if (GV->getValueType()->isSized() && !GV->hasExternalWeakLinkage()608k ) { |
673 | 608k | // TODO: Don't outright reject hasExternalWeakLinkage but set the |
674 | 608k | // CanBeNull flag. |
675 | 608k | DerefBytes = DL.getTypeStoreSize(GV->getValueType()); |
676 | 608k | CanBeNull = false; |
677 | 608k | } |
678 | 608k | } |
679 | 6.66M | return DerefBytes; |
680 | 6.66M | } |
681 | | |
682 | 128M | unsigned Value::getPointerAlignment(const DataLayout &DL) const { |
683 | 128M | assert(getType()->isPointerTy() && "must be pointer"); |
684 | 128M | |
685 | 128M | unsigned Align = 0; |
686 | 128M | if (auto *GO = dyn_cast<GlobalObject>(this)) { |
687 | 6.26M | if (isa<Function>(GO)) { |
688 | 2.66k | switch (DL.getFunctionPtrAlignType()) { |
689 | 2.66k | case DataLayout::FunctionPtrAlignType::Independent: |
690 | 2.65k | return DL.getFunctionPtrAlign(); |
691 | 2.66k | case DataLayout::FunctionPtrAlignType::MultipleOfFunctionAlign: |
692 | 10 | return std::max(DL.getFunctionPtrAlign(), GO->getAlignment()); |
693 | 6.26M | } |
694 | 6.26M | } |
695 | 6.26M | Align = GO->getAlignment(); |
696 | 6.26M | if (Align == 0) { |
697 | 62.0k | if (auto *GVar = dyn_cast<GlobalVariable>(GO)) { |
698 | 62.0k | Type *ObjectType = GVar->getValueType(); |
699 | 62.0k | if (ObjectType->isSized()) { |
700 | 62.0k | // If the object is defined in the current Module, we'll be giving |
701 | 62.0k | // it the preferred alignment. Otherwise, we have to assume that it |
702 | 62.0k | // may only have the minimum ABI alignment. |
703 | 62.0k | if (GVar->isStrongDefinitionForLinker()) |
704 | 30.4k | Align = DL.getPreferredAlignment(GVar); |
705 | 31.6k | else |
706 | 31.6k | Align = DL.getABITypeAlignment(ObjectType); |
707 | 62.0k | } |
708 | 62.0k | } |
709 | 62.0k | } |
710 | 122M | } else if (const Argument *A = dyn_cast<Argument>(this)) { |
711 | 25.4M | Align = A->getParamAlignment(); |
712 | 25.4M | |
713 | 25.4M | if (!Align && A->hasStructRetAttr()25.4M ) { |
714 | 172k | // An sret parameter has at least the ABI alignment of the return type. |
715 | 172k | Type *EltTy = cast<PointerType>(A->getType())->getElementType(); |
716 | 172k | if (EltTy->isSized()) |
717 | 172k | Align = DL.getABITypeAlignment(EltTy); |
718 | 172k | } |
719 | 97.1M | } else if (const AllocaInst *AI = dyn_cast<AllocaInst>(this)) { |
720 | 7.86M | Align = AI->getAlignment(); |
721 | 7.86M | if (Align == 0) { |
722 | 3.14k | Type *AllocatedType = AI->getAllocatedType(); |
723 | 3.14k | if (AllocatedType->isSized()) |
724 | 3.14k | Align = DL.getPrefTypeAlignment(AllocatedType); |
725 | 3.14k | } |
726 | 89.2M | } else if (const auto *Call = dyn_cast<CallBase>(this)) |
727 | 14.8M | Align = Call->getAttributes().getRetAlignment(); |
728 | 74.3M | else if (const LoadInst *LI = dyn_cast<LoadInst>(this)) |
729 | 22.0M | if (MDNode *MD = LI->getMetadata(LLVMContext::MD_align)) { |
730 | 5 | ConstantInt *CI = mdconst::extract<ConstantInt>(MD->getOperand(0)); |
731 | 5 | Align = CI->getLimitedValue(); |
732 | 5 | } |
733 | 128M | |
734 | 128M | return Align128M ; |
735 | 128M | } |
736 | | |
737 | | const Value *Value::DoPHITranslation(const BasicBlock *CurBB, |
738 | 726k | const BasicBlock *PredBB) const { |
739 | 726k | auto *PN = dyn_cast<PHINode>(this); |
740 | 726k | if (PN && PN->getParent() == CurBB75.9k ) |
741 | 68.8k | return PN->getIncomingValueForBlock(PredBB); |
742 | 657k | return this; |
743 | 657k | } |
744 | | |
745 | 1.51G | LLVMContext &Value::getContext() const { return VTy->getContext(); } |
746 | | |
747 | 776 | void Value::reverseUseList() { |
748 | 776 | if (!UseList || !UseList->Next) |
749 | 0 | // No need to reverse 0 or 1 uses. |
750 | 0 | return; |
751 | 776 | |
752 | 776 | Use *Head = UseList; |
753 | 776 | Use *Current = UseList->Next; |
754 | 776 | Head->Next = nullptr; |
755 | 2.58k | while (Current) { |
756 | 1.80k | Use *Next = Current->Next; |
757 | 1.80k | Current->Next = Head; |
758 | 1.80k | Head->setPrev(&Current->Next); |
759 | 1.80k | Head = Current; |
760 | 1.80k | Current = Next; |
761 | 1.80k | } |
762 | 776 | UseList = Head; |
763 | 776 | Head->setPrev(&UseList); |
764 | 776 | } |
765 | | |
766 | 31.3M | bool Value::isSwiftError() const { |
767 | 31.3M | auto *Arg = dyn_cast<Argument>(this); |
768 | 31.3M | if (Arg) |
769 | 302k | return Arg->hasSwiftErrorAttr(); |
770 | 31.0M | auto *Alloca = dyn_cast<AllocaInst>(this); |
771 | 31.0M | if (!Alloca) |
772 | 30.4M | return false; |
773 | 565k | return Alloca->isSwiftError(); |
774 | 565k | } |
775 | | |
776 | | //===----------------------------------------------------------------------===// |
777 | | // ValueHandleBase Class |
778 | | //===----------------------------------------------------------------------===// |
779 | | |
780 | 251M | void ValueHandleBase::AddToExistingUseList(ValueHandleBase **List) { |
781 | 251M | assert(List && "Handle list is null?"); |
782 | 251M | |
783 | 251M | // Splice ourselves into the list. |
784 | 251M | Next = *List; |
785 | 251M | *List = this; |
786 | 251M | setPrevPtr(List); |
787 | 251M | if (Next) { |
788 | 172M | Next->setPrevPtr(&Next); |
789 | 172M | assert(getValPtr() == Next->getValPtr() && "Added to wrong list?"); |
790 | 172M | } |
791 | 251M | } |
792 | | |
793 | 3.57M | void ValueHandleBase::AddToExistingUseListAfter(ValueHandleBase *List) { |
794 | 3.57M | assert(List && "Must insert after existing node"); |
795 | 3.57M | |
796 | 3.57M | Next = List->Next; |
797 | 3.57M | setPrevPtr(&List->Next); |
798 | 3.57M | List->Next = this; |
799 | 3.57M | if (Next) |
800 | 1.00M | Next->setPrevPtr(&Next); |
801 | 3.57M | } |
802 | | |
803 | 124M | void ValueHandleBase::AddToUseList() { |
804 | 124M | assert(getValPtr() && "Null pointer doesn't have a use list!"); |
805 | 124M | |
806 | 124M | LLVMContextImpl *pImpl = getValPtr()->getContext().pImpl; |
807 | 124M | |
808 | 124M | if (getValPtr()->HasValueHandle) { |
809 | 45.6M | // If this value already has a ValueHandle, then it must be in the |
810 | 45.6M | // ValueHandles map already. |
811 | 45.6M | ValueHandleBase *&Entry = pImpl->ValueHandles[getValPtr()]; |
812 | 45.6M | assert(Entry && "Value doesn't have any handles?"); |
813 | 45.6M | AddToExistingUseList(&Entry); |
814 | 45.6M | return; |
815 | 45.6M | } |
816 | 79.2M | |
817 | 79.2M | // Ok, it doesn't have any handles yet, so we must insert it into the |
818 | 79.2M | // DenseMap. However, doing this insertion could cause the DenseMap to |
819 | 79.2M | // reallocate itself, which would invalidate all of the PrevP pointers that |
820 | 79.2M | // point into the old table. Handle this by checking for reallocation and |
821 | 79.2M | // updating the stale pointers only if needed. |
822 | 79.2M | DenseMap<Value*, ValueHandleBase*> &Handles = pImpl->ValueHandles; |
823 | 79.2M | const void *OldBucketPtr = Handles.getPointerIntoBucketsArray(); |
824 | 79.2M | |
825 | 79.2M | ValueHandleBase *&Entry = Handles[getValPtr()]; |
826 | 79.2M | assert(!Entry && "Value really did already have handles?"); |
827 | 79.2M | AddToExistingUseList(&Entry); |
828 | 79.2M | getValPtr()->HasValueHandle = true; |
829 | 79.2M | |
830 | 79.2M | // If reallocation didn't happen or if this was the first insertion, don't |
831 | 79.2M | // walk the table. |
832 | 79.2M | if (Handles.isPointerIntoBucketsArray(OldBucketPtr) || |
833 | 79.2M | Handles.size() == 1114k ) { |
834 | 79.1M | return; |
835 | 79.1M | } |
836 | 62.4k | |
837 | 62.4k | // Okay, reallocation did happen. Fix the Prev Pointers. |
838 | 62.4k | for (DenseMap<Value*, ValueHandleBase*>::iterator I = Handles.begin(), |
839 | 23.4M | E = Handles.end(); I != E; ++I23.4M ) { |
840 | 23.4M | assert(I->second && I->first == I->second->getValPtr() && |
841 | 23.4M | "List invariant broken!"); |
842 | 23.4M | I->second->setPrevPtr(&I->second); |
843 | 23.4M | } |
844 | 62.4k | } |
845 | | |
846 | 255M | void ValueHandleBase::RemoveFromUseList() { |
847 | 255M | assert(getValPtr() && getValPtr()->HasValueHandle && |
848 | 255M | "Pointer doesn't have a use list!"); |
849 | 255M | |
850 | 255M | // Unlink this from its use list. |
851 | 255M | ValueHandleBase **PrevPtr = getPrevPtr(); |
852 | 255M | assert(*PrevPtr == this && "List invariant broken"); |
853 | 255M | |
854 | 255M | *PrevPtr = Next; |
855 | 255M | if (Next) { |
856 | 103M | assert(Next->getPrevPtr() == &Next && "List invariant broken"); |
857 | 103M | Next->setPrevPtr(PrevPtr); |
858 | 103M | return; |
859 | 103M | } |
860 | 151M | |
861 | 151M | // If the Next pointer was null, then it is possible that this was the last |
862 | 151M | // ValueHandle watching VP. If so, delete its entry from the ValueHandles |
863 | 151M | // map. |
864 | 151M | LLVMContextImpl *pImpl = getValPtr()->getContext().pImpl; |
865 | 151M | DenseMap<Value*, ValueHandleBase*> &Handles = pImpl->ValueHandles; |
866 | 151M | if (Handles.isPointerIntoBucketsArray(PrevPtr)) { |
867 | 79.2M | Handles.erase(getValPtr()); |
868 | 79.2M | getValPtr()->HasValueHandle = false; |
869 | 79.2M | } |
870 | 151M | } |
871 | | |
872 | 1.56M | void ValueHandleBase::ValueIsDeleted(Value *V) { |
873 | 1.56M | assert(V->HasValueHandle && "Should only be called if ValueHandles present"); |
874 | 1.56M | |
875 | 1.56M | // Get the linked list base, which is guaranteed to exist since the |
876 | 1.56M | // HasValueHandle flag is set. |
877 | 1.56M | LLVMContextImpl *pImpl = V->getContext().pImpl; |
878 | 1.56M | ValueHandleBase *Entry = pImpl->ValueHandles[V]; |
879 | 1.56M | assert(Entry && "Value bit set but no entries exist"); |
880 | 1.56M | |
881 | 1.56M | // We use a local ValueHandleBase as an iterator so that ValueHandles can add |
882 | 1.56M | // and remove themselves from the list without breaking our iteration. This |
883 | 1.56M | // is not really an AssertingVH; we just have to give ValueHandleBase a kind. |
884 | 1.56M | // Note that we deliberately do not the support the case when dropping a value |
885 | 1.56M | // handle results in a new value handle being permanently added to the list |
886 | 1.56M | // (as might occur in theory for CallbackVH's): the new value handle will not |
887 | 1.56M | // be processed and the checking code will mete out righteous punishment if |
888 | 1.56M | // the handle is still present once we have finished processing all the other |
889 | 1.56M | // value handles (it is fine to momentarily add then remove a value handle). |
890 | 3.94M | for (ValueHandleBase Iterator(Assert, *Entry); Entry; Entry = Iterator.Next2.38M ) { |
891 | 2.38M | Iterator.RemoveFromUseList(); |
892 | 2.38M | Iterator.AddToExistingUseListAfter(Entry); |
893 | 2.38M | assert(Entry->Next == &Iterator && "Loop invariant broken."); |
894 | 2.38M | |
895 | 2.38M | switch (Entry->getKind()) { |
896 | 2.38M | case Assert: |
897 | 0 | break; |
898 | 2.38M | case Weak: |
899 | 568k | case WeakTracking: |
900 | 568k | // WeakTracking and Weak just go to null, which unlinks them |
901 | 568k | // from the list. |
902 | 568k | Entry->operator=(nullptr); |
903 | 568k | break; |
904 | 1.81M | case Callback: |
905 | 1.81M | // Forward to the subclass's implementation. |
906 | 1.81M | static_cast<CallbackVH*>(Entry)->deleted(); |
907 | 1.81M | break; |
908 | 2.38M | } |
909 | 2.38M | } |
910 | 1.56M | |
911 | 1.56M | // All callbacks, weak references, and assertingVHs should be dropped by now. |
912 | 1.56M | if (V->HasValueHandle) { |
913 | | #ifndef NDEBUG // Only in +Asserts mode... |
914 | | dbgs() << "While deleting: " << *V->getType() << " %" << V->getName() |
915 | | << "\n"; |
916 | | if (pImpl->ValueHandles[V]->getKind() == Assert) |
917 | | llvm_unreachable("An asserting value handle still pointed to this" |
918 | | " value!"); |
919 | | |
920 | | #endif |
921 | 0 | llvm_unreachable("All references to V were not removed?"); |
922 | 0 | } |
923 | 1.56M | } |
924 | | |
925 | 1.00M | void ValueHandleBase::ValueIsRAUWd(Value *Old, Value *New) { |
926 | 1.00M | assert(Old->HasValueHandle &&"Should only be called if ValueHandles present"); |
927 | 1.00M | assert(Old != New && "Changing value into itself!"); |
928 | 1.00M | assert(Old->getType() == New->getType() && |
929 | 1.00M | "replaceAllUses of value with new value of different type!"); |
930 | 1.00M | |
931 | 1.00M | // Get the linked list base, which is guaranteed to exist since the |
932 | 1.00M | // HasValueHandle flag is set. |
933 | 1.00M | LLVMContextImpl *pImpl = Old->getContext().pImpl; |
934 | 1.00M | ValueHandleBase *Entry = pImpl->ValueHandles[Old]; |
935 | 1.00M | |
936 | 1.00M | assert(Entry && "Value bit set but no entries exist"); |
937 | 1.00M | |
938 | 1.00M | // We use a local ValueHandleBase as an iterator so that |
939 | 1.00M | // ValueHandles can add and remove themselves from the list without |
940 | 1.00M | // breaking our iteration. This is not really an AssertingVH; we |
941 | 1.00M | // just have to give ValueHandleBase some kind. |
942 | 2.19M | for (ValueHandleBase Iterator(Assert, *Entry); Entry; Entry = Iterator.Next1.19M ) { |
943 | 1.19M | Iterator.RemoveFromUseList(); |
944 | 1.19M | Iterator.AddToExistingUseListAfter(Entry); |
945 | 1.19M | assert(Entry->Next == &Iterator && "Loop invariant broken."); |
946 | 1.19M | |
947 | 1.19M | switch (Entry->getKind()) { |
948 | 1.19M | case Assert: |
949 | 20.0k | case Weak: |
950 | 20.0k | // Asserting and Weak handles do not follow RAUW implicitly. |
951 | 20.0k | break; |
952 | 758k | case WeakTracking: |
953 | 758k | // Weak goes to the new value, which will unlink it from Old's list. |
954 | 758k | Entry->operator=(New); |
955 | 758k | break; |
956 | 413k | case Callback: |
957 | 413k | // Forward to the subclass's implementation. |
958 | 413k | static_cast<CallbackVH*>(Entry)->allUsesReplacedWith(New); |
959 | 413k | break; |
960 | 1.19M | } |
961 | 1.19M | } |
962 | 1.00M | |
963 | | #ifndef NDEBUG |
964 | | // If any new weak value handles were added while processing the |
965 | | // list, then complain about it now. |
966 | | if (Old->HasValueHandle) |
967 | | for (Entry = pImpl->ValueHandles[Old]; Entry; Entry = Entry->Next) |
968 | | switch (Entry->getKind()) { |
969 | | case WeakTracking: |
970 | | dbgs() << "After RAUW from " << *Old->getType() << " %" |
971 | | << Old->getName() << " to " << *New->getType() << " %" |
972 | | << New->getName() << "\n"; |
973 | | llvm_unreachable( |
974 | | "A weak tracking value handle still pointed to the old value!\n"); |
975 | | default: |
976 | | break; |
977 | | } |
978 | | #endif |
979 | | } |
980 | | |
981 | | // Pin the vtable to this file. |
982 | 0 | void CallbackVH::anchor() {} |