/Users/buildslave/jenkins/sharedspace/clang-stage2-coverage-R@2/llvm/lib/Bitcode/Writer/ValueEnumerator.cpp
Line | Count | Source (jump to first uncovered line) |
1 | | //===- ValueEnumerator.cpp - Number values and types for bitcode writer ---===// |
2 | | // |
3 | | // The LLVM Compiler Infrastructure |
4 | | // |
5 | | // This file is distributed under the University of Illinois Open Source |
6 | | // License. See LICENSE.TXT for details. |
7 | | // |
8 | | //===----------------------------------------------------------------------===// |
9 | | // |
10 | | // This file implements the ValueEnumerator class. |
11 | | // |
12 | | //===----------------------------------------------------------------------===// |
13 | | |
14 | | #include "ValueEnumerator.h" |
15 | | #include "llvm/ADT/DenseMap.h" |
16 | | #include "llvm/ADT/SmallVector.h" |
17 | | #include "llvm/IR/Argument.h" |
18 | | #include "llvm/IR/Attributes.h" |
19 | | #include "llvm/IR/BasicBlock.h" |
20 | | #include "llvm/IR/Constant.h" |
21 | | #include "llvm/IR/DebugInfoMetadata.h" |
22 | | #include "llvm/IR/DerivedTypes.h" |
23 | | #include "llvm/IR/Function.h" |
24 | | #include "llvm/IR/GlobalAlias.h" |
25 | | #include "llvm/IR/GlobalIFunc.h" |
26 | | #include "llvm/IR/GlobalObject.h" |
27 | | #include "llvm/IR/GlobalValue.h" |
28 | | #include "llvm/IR/GlobalVariable.h" |
29 | | #include "llvm/IR/Instruction.h" |
30 | | #include "llvm/IR/Instructions.h" |
31 | | #include "llvm/IR/Metadata.h" |
32 | | #include "llvm/IR/Module.h" |
33 | | #include "llvm/IR/Type.h" |
34 | | #include "llvm/IR/Use.h" |
35 | | #include "llvm/IR/UseListOrder.h" |
36 | | #include "llvm/IR/User.h" |
37 | | #include "llvm/IR/Value.h" |
38 | | #include "llvm/IR/ValueSymbolTable.h" |
39 | | #include "llvm/Support/Casting.h" |
40 | | #include "llvm/Support/Compiler.h" |
41 | | #include "llvm/Support/Debug.h" |
42 | | #include "llvm/Support/MathExtras.h" |
43 | | #include "llvm/Support/raw_ostream.h" |
44 | | #include <algorithm> |
45 | | #include <cassert> |
46 | | #include <cstddef> |
47 | | #include <iterator> |
48 | | #include <tuple> |
49 | | #include <utility> |
50 | | #include <vector> |
51 | | |
52 | | using namespace llvm; |
53 | | |
54 | | namespace { |
55 | | |
56 | | struct OrderMap { |
57 | | DenseMap<const Value *, std::pair<unsigned, bool>> IDs; |
58 | | unsigned LastGlobalConstantID = 0; |
59 | | unsigned LastGlobalValueID = 0; |
60 | | |
61 | 2.43k | OrderMap() = default; |
62 | | |
63 | 6.13k | bool isGlobalConstant(unsigned ID) const { |
64 | 6.13k | return ID <= LastGlobalConstantID; |
65 | 6.13k | } |
66 | | |
67 | 93.4k | bool isGlobalValue(unsigned ID) const { |
68 | 6.13k | return ID <= LastGlobalValueID && !isGlobalConstant(ID); |
69 | 93.4k | } |
70 | | |
71 | 4.86k | unsigned size() const { return IDs.size(); } |
72 | 67.9k | std::pair<unsigned, bool> &operator[](const Value *V) { return IDs[V]; } |
73 | | |
74 | 255k | std::pair<unsigned, bool> lookup(const Value *V) const { |
75 | 255k | return IDs.lookup(V); |
76 | 255k | } |
77 | | |
78 | 52.7k | void index(const Value *V) { |
79 | 52.7k | // Explicitly sequence get-size and insert-value operations to avoid UB. |
80 | 52.7k | unsigned ID = IDs.size() + 1; |
81 | 52.7k | IDs[V].first = ID; |
82 | 52.7k | } |
83 | | }; |
84 | | |
85 | | } // end anonymous namespace |
86 | | |
87 | 58.5k | static void orderValue(const Value *V, OrderMap &OM) { |
88 | 58.5k | if (OM.lookup(V).first) |
89 | 5.81k | return; |
90 | 52.7k | |
91 | 52.7k | if (const Constant *52.7k C52.7k = dyn_cast<Constant>(V)) |
92 | 17.8k | if (17.8k C->getNumOperands() && 17.8k !isa<GlobalValue>(C)3.73k ) |
93 | 1.30k | for (const Value *Op : C->operands()) |
94 | 2.86k | if (2.86k !isa<BasicBlock>(Op) && 2.86k !isa<GlobalValue>(Op)2.82k ) |
95 | 2.00k | orderValue(Op, OM); |
96 | 58.5k | |
97 | 58.5k | // Note: we cannot cache this lookup above, since inserting into the map |
98 | 58.5k | // changes the map's size, and thus affects the other IDs. |
99 | 58.5k | OM.index(V); |
100 | 58.5k | } |
101 | | |
102 | 2.43k | static OrderMap orderModule(const Module &M) { |
103 | 2.43k | // This needs to match the order used by ValueEnumerator::ValueEnumerator() |
104 | 2.43k | // and ValueEnumerator::incorporateFunction(). |
105 | 2.43k | OrderMap OM; |
106 | 2.43k | |
107 | 2.43k | // In the reader, initializers of GlobalValues are set *after* all the |
108 | 2.43k | // globals have been read. Rather than awkwardly modeling this behaviour |
109 | 2.43k | // directly in predictValueUseListOrderImpl(), just assign IDs to |
110 | 2.43k | // initializers of GlobalValues before GlobalValues themselves to model this |
111 | 2.43k | // implicitly. |
112 | 2.43k | for (const GlobalVariable &G : M.globals()) |
113 | 2.17k | if (2.17k G.hasInitializer()2.17k ) |
114 | 1.78k | if (1.78k !isa<GlobalValue>(G.getInitializer())1.78k ) |
115 | 1.58k | orderValue(G.getInitializer(), OM); |
116 | 2.43k | for (const GlobalAlias &A : M.aliases()) |
117 | 513 | if (513 !isa<GlobalValue>(A.getAliasee())513 ) |
118 | 263 | orderValue(A.getAliasee(), OM); |
119 | 2.43k | for (const GlobalIFunc &I : M.ifuncs()) |
120 | 28 | if (28 !isa<GlobalValue>(I.getResolver())28 ) |
121 | 0 | orderValue(I.getResolver(), OM); |
122 | 5.89k | for (const Function &F : M) { |
123 | 5.89k | for (const Use &U : F.operands()) |
124 | 321 | if (321 !isa<GlobalValue>(U.get())321 ) |
125 | 274 | orderValue(U.get(), OM); |
126 | 5.89k | } |
127 | 2.43k | OM.LastGlobalConstantID = OM.size(); |
128 | 2.43k | |
129 | 2.43k | // Initializers of GlobalValues are processed in |
130 | 2.43k | // BitcodeReader::ResolveGlobalAndAliasInits(). Match the order there rather |
131 | 2.43k | // than ValueEnumerator, and match the code in predictValueUseListOrderImpl() |
132 | 2.43k | // by giving IDs in reverse order. |
133 | 2.43k | // |
134 | 2.43k | // Since GlobalValues never reference each other directly (just through |
135 | 2.43k | // initializers), their relative IDs only matter for determining order of |
136 | 2.43k | // uses in their initializers. |
137 | 2.43k | for (const Function &F : M) |
138 | 5.89k | orderValue(&F, OM); |
139 | 2.43k | for (const GlobalAlias &A : M.aliases()) |
140 | 513 | orderValue(&A, OM); |
141 | 2.43k | for (const GlobalIFunc &I : M.ifuncs()) |
142 | 28 | orderValue(&I, OM); |
143 | 2.43k | for (const GlobalVariable &G : M.globals()) |
144 | 2.17k | orderValue(&G, OM); |
145 | 2.43k | OM.LastGlobalValueID = OM.size(); |
146 | 2.43k | |
147 | 5.89k | for (const Function &F : M) { |
148 | 5.89k | if (F.isDeclaration()) |
149 | 1.67k | continue; |
150 | 4.22k | // Here we need to match the union of ValueEnumerator::incorporateFunction() |
151 | 4.22k | // and WriteFunction(). Basic blocks are implicitly declared before |
152 | 4.22k | // anything else (by declaring their size). |
153 | 4.22k | for (const BasicBlock &BB : F) |
154 | 7.18k | orderValue(&BB, OM); |
155 | 4.22k | for (const Argument &A : F.args()) |
156 | 2.91k | orderValue(&A, OM); |
157 | 4.22k | for (const BasicBlock &BB : F) |
158 | 7.18k | for (const Instruction &I : BB) |
159 | 24.7k | for (const Value *Op : I.operands()) |
160 | 40.2k | if (40.2k (isa<Constant>(*Op) && 40.2k !isa<GlobalValue>(*Op)16.3k ) || |
161 | 29.2k | isa<InlineAsm>(*Op)) |
162 | 11.0k | orderValue(Op, OM); |
163 | 4.22k | for (const BasicBlock &BB : F) |
164 | 7.18k | for (const Instruction &I : BB) |
165 | 24.7k | orderValue(&I, OM); |
166 | 5.89k | } |
167 | 2.43k | return OM; |
168 | 2.43k | } |
169 | | |
170 | | static void predictValueUseListOrderImpl(const Value *V, const Function *F, |
171 | | unsigned ID, const OrderMap &OM, |
172 | 5.93k | UseListOrderStack &Stack) { |
173 | 5.93k | // Predict use-list order for this one. |
174 | 5.93k | using Entry = std::pair<const Use *, unsigned>; |
175 | 5.93k | SmallVector<Entry, 64> List; |
176 | 5.93k | for (const Use &U : V->uses()) |
177 | 5.93k | // Check if this user will be serialized. |
178 | 25.4k | if (25.4k OM.lookup(U.getUser()).first25.4k ) |
179 | 25.0k | List.push_back(std::make_pair(&U, List.size())); |
180 | 5.93k | |
181 | 5.93k | if (List.size() < 2) |
182 | 5.93k | // We may have lost some users. |
183 | 278 | return; |
184 | 5.66k | |
185 | 5.66k | bool IsGlobalValue = OM.isGlobalValue(ID); |
186 | 87.9k | std::sort(List.begin(), List.end(), [&](const Entry &L, const Entry &R) { |
187 | 87.9k | const Use *LU = L.first; |
188 | 87.9k | const Use *RU = R.first; |
189 | 87.9k | if (LU == RU) |
190 | 2.28k | return false; |
191 | 85.6k | |
192 | 85.6k | auto LID = OM.lookup(LU->getUser()).first; |
193 | 85.6k | auto RID = OM.lookup(RU->getUser()).first; |
194 | 85.6k | |
195 | 85.6k | // Global values are processed in reverse order. |
196 | 85.6k | // |
197 | 85.6k | // Moreover, initializers of GlobalValues are set *after* all the globals |
198 | 85.6k | // have been read (despite having earlier IDs). Rather than awkwardly |
199 | 85.6k | // modeling this behaviour here, orderModule() has assigned IDs to |
200 | 85.6k | // initializers of GlobalValues before GlobalValues themselves. |
201 | 85.6k | if (OM.isGlobalValue(LID) && 85.6k OM.isGlobalValue(RID)2.12k ) |
202 | 1.95k | return LID < RID; |
203 | 83.6k | |
204 | 83.6k | // If ID is 4, then expect: 7 6 5 1 2 3. |
205 | 83.6k | if (83.6k LID < RID83.6k ) { |
206 | 55.1k | if (RID <= ID) |
207 | 156 | if (156 !IsGlobalValue156 ) // GlobalValue uses don't get reversed. |
208 | 9 | return true; |
209 | 55.1k | return false; |
210 | 55.1k | } |
211 | 28.5k | if (28.5k RID < LID28.5k ) { |
212 | 27.2k | if (LID <= ID) |
213 | 111 | if (111 !IsGlobalValue111 ) // GlobalValue uses don't get reversed. |
214 | 35 | return false; |
215 | 27.2k | return true; |
216 | 27.2k | } |
217 | 1.25k | |
218 | 1.25k | // LID and RID are equal, so we have different operands of the same user. |
219 | 1.25k | // Assume operands are added in order for all instructions. |
220 | 1.25k | if (1.25k LID <= ID1.25k ) |
221 | 52 | if (52 !IsGlobalValue52 ) // GlobalValue uses don't get reversed. |
222 | 14 | return LU->getOperandNo() < RU->getOperandNo(); |
223 | 1.24k | return LU->getOperandNo() > RU->getOperandNo(); |
224 | 1.24k | }); |
225 | 5.66k | |
226 | 5.66k | if (std::is_sorted( |
227 | 5.66k | List.begin(), List.end(), |
228 | 13.1k | [](const Entry &L, const Entry &R) { return L.second < R.second; })) |
229 | 5.66k | // Order is already correct. |
230 | 4.29k | return; |
231 | 1.37k | |
232 | 1.37k | // Store the shuffle. |
233 | 1.37k | Stack.emplace_back(V, F, List.size()); |
234 | 1.37k | assert(List.size() == Stack.back().Shuffle.size() && "Wrong size"); |
235 | 10.4k | for (size_t I = 0, E = List.size(); I != E10.4k ; ++I9.12k ) |
236 | 9.12k | Stack.back().Shuffle[I] = List[I].second; |
237 | 5.93k | } |
238 | | |
239 | | static void predictValueUseListOrder(const Value *V, const Function *F, |
240 | 67.9k | OrderMap &OM, UseListOrderStack &Stack) { |
241 | 67.9k | auto &IDPair = OM[V]; |
242 | 67.9k | assert(IDPair.first && "Unmapped value"); |
243 | 67.9k | if (IDPair.second) |
244 | 67.9k | // Already predicted. |
245 | 15.2k | return; |
246 | 52.7k | |
247 | 52.7k | // Do the actual prediction. |
248 | 52.7k | IDPair.second = true; |
249 | 52.7k | if (!V->use_empty() && 52.7k std::next(V->use_begin()) != V->use_end()26.2k ) |
250 | 5.93k | predictValueUseListOrderImpl(V, F, IDPair.first, OM, Stack); |
251 | 52.7k | |
252 | 52.7k | // Recursive descent into constants. |
253 | 52.7k | if (const Constant *C = dyn_cast<Constant>(V)) |
254 | 17.8k | if (17.8k C->getNumOperands()17.8k ) // Visit GlobalValues. |
255 | 3.73k | for (const Value *Op : C->operands()) |
256 | 5.50k | if (5.50k isa<Constant>(Op)5.50k ) // Visit GlobalValues. |
257 | 5.46k | predictValueUseListOrder(Op, F, OM, Stack); |
258 | 67.9k | } |
259 | | |
260 | 2.43k | static UseListOrderStack predictUseListOrder(const Module &M) { |
261 | 2.43k | OrderMap OM = orderModule(M); |
262 | 2.43k | |
263 | 2.43k | // Use-list orders need to be serialized after all the users have been added |
264 | 2.43k | // to a value, or else the shuffles will be incomplete. Store them per |
265 | 2.43k | // function in a stack. |
266 | 2.43k | // |
267 | 2.43k | // Aside from function order, the order of values doesn't matter much here. |
268 | 2.43k | UseListOrderStack Stack; |
269 | 2.43k | |
270 | 2.43k | // We want to visit the functions backward now so we can list function-local |
271 | 2.43k | // constants in the last Function they're used in. Module-level constants |
272 | 2.43k | // have already been visited above. |
273 | 8.32k | for (auto I = M.rbegin(), E = M.rend(); I != E8.32k ; ++I5.89k ) { |
274 | 5.89k | const Function &F = *I; |
275 | 5.89k | if (F.isDeclaration()) |
276 | 1.67k | continue; |
277 | 4.22k | for (const BasicBlock &BB : F) |
278 | 7.18k | predictValueUseListOrder(&BB, &F, OM, Stack); |
279 | 4.22k | for (const Argument &A : F.args()) |
280 | 2.91k | predictValueUseListOrder(&A, &F, OM, Stack); |
281 | 4.22k | for (const BasicBlock &BB : F) |
282 | 7.18k | for (const Instruction &I : BB) |
283 | 24.7k | for (const Value *Op : I.operands()) |
284 | 40.2k | if (40.2k isa<Constant>(*Op) || 40.2k isa<InlineAsm>(*Op)23.8k ) // Visit GlobalValues. |
285 | 16.4k | predictValueUseListOrder(Op, &F, OM, Stack); |
286 | 4.22k | for (const BasicBlock &BB : F) |
287 | 7.18k | for (const Instruction &I : BB) |
288 | 24.7k | predictValueUseListOrder(&I, &F, OM, Stack); |
289 | 5.89k | } |
290 | 2.43k | |
291 | 2.43k | // Visit globals last, since the module-level use-list block will be seen |
292 | 2.43k | // before the function bodies are processed. |
293 | 2.43k | for (const GlobalVariable &G : M.globals()) |
294 | 2.17k | predictValueUseListOrder(&G, nullptr, OM, Stack); |
295 | 2.43k | for (const Function &F : M) |
296 | 5.89k | predictValueUseListOrder(&F, nullptr, OM, Stack); |
297 | 2.43k | for (const GlobalAlias &A : M.aliases()) |
298 | 513 | predictValueUseListOrder(&A, nullptr, OM, Stack); |
299 | 2.43k | for (const GlobalIFunc &I : M.ifuncs()) |
300 | 28 | predictValueUseListOrder(&I, nullptr, OM, Stack); |
301 | 2.43k | for (const GlobalVariable &G : M.globals()) |
302 | 2.17k | if (2.17k G.hasInitializer()2.17k ) |
303 | 1.78k | predictValueUseListOrder(G.getInitializer(), nullptr, OM, Stack); |
304 | 2.43k | for (const GlobalAlias &A : M.aliases()) |
305 | 513 | predictValueUseListOrder(A.getAliasee(), nullptr, OM, Stack); |
306 | 2.43k | for (const GlobalIFunc &I : M.ifuncs()) |
307 | 28 | predictValueUseListOrder(I.getResolver(), nullptr, OM, Stack); |
308 | 5.89k | for (const Function &F : M) { |
309 | 5.89k | for (const Use &U : F.operands()) |
310 | 321 | predictValueUseListOrder(U.get(), nullptr, OM, Stack); |
311 | 5.89k | } |
312 | 2.43k | |
313 | 2.43k | return Stack; |
314 | 2.43k | } |
315 | | |
316 | 5.78k | static bool isIntOrIntVectorValue(const std::pair<const Value*, unsigned> &V) { |
317 | 5.78k | return V.first->getType()->isIntOrIntVectorTy(); |
318 | 5.78k | } |
319 | | |
320 | | ValueEnumerator::ValueEnumerator(const Module &M, |
321 | | bool ShouldPreserveUseListOrder) |
322 | 3.22k | : ShouldPreserveUseListOrder(ShouldPreserveUseListOrder) { |
323 | 3.22k | if (ShouldPreserveUseListOrder) |
324 | 2.43k | UseListOrders = predictUseListOrder(M); |
325 | 3.22k | |
326 | 3.22k | // Enumerate the global variables. |
327 | 3.22k | for (const GlobalVariable &GV : M.globals()) |
328 | 4.69k | EnumerateValue(&GV); |
329 | 3.22k | |
330 | 3.22k | // Enumerate the functions. |
331 | 8.90k | for (const Function & F : M) { |
332 | 8.90k | EnumerateValue(&F); |
333 | 8.90k | EnumerateAttributes(F.getAttributes()); |
334 | 8.90k | } |
335 | 3.22k | |
336 | 3.22k | // Enumerate the aliases. |
337 | 3.22k | for (const GlobalAlias &GA : M.aliases()) |
338 | 594 | EnumerateValue(&GA); |
339 | 3.22k | |
340 | 3.22k | // Enumerate the ifuncs. |
341 | 3.22k | for (const GlobalIFunc &GIF : M.ifuncs()) |
342 | 28 | EnumerateValue(&GIF); |
343 | 3.22k | |
344 | 3.22k | // Remember what is the cutoff between globalvalue's and other constants. |
345 | 3.22k | unsigned FirstConstant = Values.size(); |
346 | 3.22k | |
347 | 3.22k | // Enumerate the global variable initializers and attributes. |
348 | 4.69k | for (const GlobalVariable &GV : M.globals()) { |
349 | 4.69k | if (GV.hasInitializer()) |
350 | 3.86k | EnumerateValue(GV.getInitializer()); |
351 | 4.69k | if (GV.hasAttributes()) |
352 | 12 | EnumerateAttributes(GV.getAttributesAsList(AttributeList::FunctionIndex)); |
353 | 4.69k | } |
354 | 3.22k | |
355 | 3.22k | // Enumerate the aliasees. |
356 | 3.22k | for (const GlobalAlias &GA : M.aliases()) |
357 | 594 | EnumerateValue(GA.getAliasee()); |
358 | 3.22k | |
359 | 3.22k | // Enumerate the ifunc resolvers. |
360 | 3.22k | for (const GlobalIFunc &GIF : M.ifuncs()) |
361 | 28 | EnumerateValue(GIF.getResolver()); |
362 | 3.22k | |
363 | 3.22k | // Enumerate any optional Function data. |
364 | 3.22k | for (const Function &F : M) |
365 | 8.90k | for (const Use &U : F.operands()) |
366 | 426 | EnumerateValue(U.get()); |
367 | 3.22k | |
368 | 3.22k | // Enumerate the metadata type. |
369 | 3.22k | // |
370 | 3.22k | // TODO: Move this to ValueEnumerator::EnumerateOperandType() once bitcode |
371 | 3.22k | // only encodes the metadata type when it's used as a value. |
372 | 3.22k | EnumerateType(Type::getMetadataTy(M.getContext())); |
373 | 3.22k | |
374 | 3.22k | // Insert constants and metadata that are named at module level into the slot |
375 | 3.22k | // pool so that the module symbol table can refer to them... |
376 | 3.22k | EnumerateValueSymbolTable(M.getValueSymbolTable()); |
377 | 3.22k | EnumerateNamedMetadata(M); |
378 | 3.22k | |
379 | 3.22k | SmallVector<std::pair<unsigned, MDNode *>, 8> MDs; |
380 | 4.69k | for (const GlobalVariable &GV : M.globals()) { |
381 | 4.69k | MDs.clear(); |
382 | 4.69k | GV.getAllMetadata(MDs); |
383 | 4.69k | for (const auto &I : MDs) |
384 | 4.69k | // FIXME: Pass GV to EnumerateMetadata and arrange for the bitcode writer |
385 | 4.69k | // to write metadata to the global variable's own metadata block |
386 | 4.69k | // (PR28134). |
387 | 83 | EnumerateMetadata(nullptr, I.second); |
388 | 4.69k | } |
389 | 3.22k | |
390 | 3.22k | // Enumerate types used by function bodies and argument lists. |
391 | 8.90k | for (const Function &F : M) { |
392 | 8.90k | for (const Argument &A : F.args()) |
393 | 7.72k | EnumerateType(A.getType()); |
394 | 8.90k | |
395 | 8.90k | // Enumerate metadata attached to this function. |
396 | 8.90k | MDs.clear(); |
397 | 8.90k | F.getAllMetadata(MDs); |
398 | 8.90k | for (const auto &I : MDs) |
399 | 366 | EnumerateMetadata(F.isDeclaration() ? 366 nullptr11 : &F355 , I.second); |
400 | 8.90k | |
401 | 8.90k | for (const BasicBlock &BB : F) |
402 | 10.6k | for (const Instruction &I : BB) 10.6k { |
403 | 85.3k | for (const Use &Op : I.operands()) { |
404 | 85.3k | auto *MD = dyn_cast<MetadataAsValue>(&Op); |
405 | 85.3k | if (!MD85.3k ) { |
406 | 84.5k | EnumerateOperandType(Op); |
407 | 84.5k | continue; |
408 | 84.5k | } |
409 | 814 | |
410 | 814 | // Local metadata is enumerated during function-incorporation. |
411 | 814 | if (814 isa<LocalAsMetadata>(MD->getMetadata())814 ) |
412 | 250 | continue; |
413 | 564 | |
414 | 564 | EnumerateMetadata(&F, MD->getMetadata()); |
415 | 564 | } |
416 | 51.9k | EnumerateType(I.getType()); |
417 | 51.9k | if (const CallInst *CI = dyn_cast<CallInst>(&I)) |
418 | 7.32k | EnumerateAttributes(CI->getAttributes()); |
419 | 44.6k | else if (const InvokeInst *44.6k II44.6k = dyn_cast<InvokeInst>(&I)) |
420 | 143 | EnumerateAttributes(II->getAttributes()); |
421 | 51.9k | |
422 | 51.9k | // Enumerate metadata attached with this instruction. |
423 | 51.9k | MDs.clear(); |
424 | 51.9k | I.getAllMetadataOtherThanDebugLoc(MDs); |
425 | 52.8k | for (unsigned i = 0, e = MDs.size(); i != e52.8k ; ++i890 ) |
426 | 890 | EnumerateMetadata(&F, MDs[i].second); |
427 | 51.9k | |
428 | 51.9k | // Don't enumerate the location directly -- it has a special record |
429 | 51.9k | // type -- but enumerate its operands. |
430 | 51.9k | if (DILocation *L = I.getDebugLoc()) |
431 | 1.97k | for (const Metadata *Op : L->operands()) |
432 | 2.00k | EnumerateMetadata(&F, Op); |
433 | 10.6k | } |
434 | 8.90k | } |
435 | 3.22k | |
436 | 3.22k | // Optimize constant ordering. |
437 | 3.22k | OptimizeConstants(FirstConstant, Values.size()); |
438 | 3.22k | |
439 | 3.22k | // Organize metadata ordering. |
440 | 3.22k | organizeMetadata(); |
441 | 3.22k | } |
442 | | |
443 | 818 | unsigned ValueEnumerator::getInstructionID(const Instruction *Inst) const { |
444 | 818 | InstructionMapType::const_iterator I = InstructionMap.find(Inst); |
445 | 818 | assert(I != InstructionMap.end() && "Instruction is not mapped!"); |
446 | 818 | return I->second; |
447 | 818 | } |
448 | | |
449 | 717 | unsigned ValueEnumerator::getComdatID(const Comdat *C) const { |
450 | 717 | unsigned ComdatID = Comdats.idFor(C); |
451 | 717 | assert(ComdatID && "Comdat not found!"); |
452 | 717 | return ComdatID; |
453 | 717 | } |
454 | | |
455 | 51.9k | void ValueEnumerator::setInstructionID(const Instruction *I) { |
456 | 51.9k | InstructionMap[I] = InstructionCount++; |
457 | 51.9k | } |
458 | | |
459 | 150k | unsigned ValueEnumerator::getValueID(const Value *V) const { |
460 | 150k | if (auto *MD = dyn_cast<MetadataAsValue>(V)) |
461 | 814 | return getMetadataID(MD->getMetadata()); |
462 | 149k | |
463 | 149k | ValueMapType::const_iterator I = ValueMap.find(V); |
464 | 149k | assert(I != ValueMap.end() && "Value not in slotcalculator!"); |
465 | 149k | return I->second-1; |
466 | 149k | } |
467 | | |
468 | | #if !defined(NDEBUG) || defined(LLVM_ENABLE_DUMP) |
469 | | LLVM_DUMP_METHOD void ValueEnumerator::dump() const { |
470 | | print(dbgs(), ValueMap, "Default"); |
471 | | dbgs() << '\n'; |
472 | | print(dbgs(), MetadataMap, "MetaData"); |
473 | | dbgs() << '\n'; |
474 | | } |
475 | | #endif |
476 | | |
477 | | void ValueEnumerator::print(raw_ostream &OS, const ValueMapType &Map, |
478 | 0 | const char *Name) const { |
479 | 0 | OS << "Map Name: " << Name << "\n"; |
480 | 0 | OS << "Size: " << Map.size() << "\n"; |
481 | 0 | for (ValueMapType::const_iterator I = Map.begin(), |
482 | 0 | E = Map.end(); I != E0 ; ++I0 ) { |
483 | 0 | const Value *V = I->first; |
484 | 0 | if (V->hasName()) |
485 | 0 | OS << "Value: " << V->getName(); |
486 | 0 | else |
487 | 0 | OS << "Value: [null]\n"; |
488 | 0 | V->print(errs()); |
489 | 0 | errs() << '\n'; |
490 | 0 |
|
491 | 0 | OS << " Uses(" << std::distance(V->use_begin(),V->use_end()) << "):"; |
492 | 0 | for (const Use &U : V->uses()) { |
493 | 0 | if (&U != &*V->use_begin()) |
494 | 0 | OS << ","; |
495 | 0 | if(U->hasName()) |
496 | 0 | OS << " " << U->getName(); |
497 | 0 | else |
498 | 0 | OS << " [null]"; |
499 | 0 |
|
500 | 0 | } |
501 | 0 | OS << "\n\n"; |
502 | 0 | } |
503 | 0 | } |
504 | | |
505 | | void ValueEnumerator::print(raw_ostream &OS, const MetadataMapType &Map, |
506 | 0 | const char *Name) const { |
507 | 0 | OS << "Map Name: " << Name << "\n"; |
508 | 0 | OS << "Size: " << Map.size() << "\n"; |
509 | 0 | for (auto I = Map.begin(), E = Map.end(); I != E0 ; ++I0 ) { |
510 | 0 | const Metadata *MD = I->first; |
511 | 0 | OS << "Metadata: slot = " << I->second.ID << "\n"; |
512 | 0 | OS << "Metadata: function = " << I->second.F << "\n"; |
513 | 0 | MD->print(OS); |
514 | 0 | OS << "\n"; |
515 | 0 | } |
516 | 0 | } |
517 | | |
518 | | /// OptimizeConstants - Reorder constant pool for denser encoding. |
519 | 9.67k | void ValueEnumerator::OptimizeConstants(unsigned CstStart, unsigned CstEnd) { |
520 | 9.67k | if (CstStart == CstEnd || 9.67k CstStart+1 == CstEnd4.37k ) return7.33k ; |
521 | 2.34k | |
522 | 2.34k | if (2.34k ShouldPreserveUseListOrder2.34k ) |
523 | 2.34k | // Optimizing constants makes the use-list order difficult to predict. |
524 | 2.34k | // Disable it for now when trying to preserve the order. |
525 | 1.64k | return; |
526 | 701 | |
527 | 701 | std::stable_sort(Values.begin() + CstStart, Values.begin() + CstEnd, |
528 | 701 | [this](const std::pair<const Value *, unsigned> &LHS, |
529 | 15.2k | const std::pair<const Value *, unsigned> &RHS) { |
530 | 15.2k | // Sort by plane. |
531 | 15.2k | if (LHS.first->getType() != RHS.first->getType()) |
532 | 10.7k | return getTypeID(LHS.first->getType()) < getTypeID(RHS.first->getType()); |
533 | 4.49k | // Then by frequency. |
534 | 4.49k | return LHS.second > RHS.second; |
535 | 4.49k | }); |
536 | 701 | |
537 | 701 | // Ensure that integer and vector of integer constants are at the start of the |
538 | 701 | // constant pool. This is important so that GEP structure indices come before |
539 | 701 | // gep constant exprs. |
540 | 701 | std::stable_partition(Values.begin() + CstStart, Values.begin() + CstEnd, |
541 | 701 | isIntOrIntVectorValue); |
542 | 701 | |
543 | 701 | // Rebuild the modified portion of ValueMap. |
544 | 6.48k | for (; CstStart != CstEnd6.48k ; ++CstStart5.78k ) |
545 | 5.78k | ValueMap[Values[CstStart].first] = CstStart+1; |
546 | 9.67k | } |
547 | | |
548 | | /// EnumerateValueSymbolTable - Insert all of the values in the specified symbol |
549 | | /// table into the values table. |
550 | 3.23k | void ValueEnumerator::EnumerateValueSymbolTable(const ValueSymbolTable &VST) { |
551 | 3.23k | for (ValueSymbolTable::const_iterator VI = VST.begin(), VE = VST.end(); |
552 | 17.1k | VI != VE17.1k ; ++VI13.9k ) |
553 | 13.9k | EnumerateValue(VI->getValue()); |
554 | 3.23k | } |
555 | | |
556 | | /// Insert all of the values referenced by named metadata in the specified |
557 | | /// module. |
558 | 3.23k | void ValueEnumerator::EnumerateNamedMetadata(const Module &M) { |
559 | 3.23k | for (const auto &I : M.named_metadata()) |
560 | 1.62k | EnumerateNamedMDNode(&I); |
561 | 3.23k | } |
562 | | |
563 | 1.62k | void ValueEnumerator::EnumerateNamedMDNode(const NamedMDNode *MD) { |
564 | 5.33k | for (unsigned i = 0, e = MD->getNumOperands(); i != e5.33k ; ++i3.71k ) |
565 | 3.71k | EnumerateMetadata(nullptr, MD->getOperand(i)); |
566 | 1.62k | } |
567 | | |
568 | 7.86k | unsigned ValueEnumerator::getMetadataFunctionID(const Function *F) const { |
569 | 7.86k | return F ? getValueID(F) + 14.06k : 03.80k ; |
570 | 7.86k | } |
571 | | |
572 | 7.61k | void ValueEnumerator::EnumerateMetadata(const Function *F, const Metadata *MD) { |
573 | 7.61k | EnumerateMetadata(getMetadataFunctionID(F), MD); |
574 | 7.61k | } |
575 | | |
576 | | void ValueEnumerator::EnumerateFunctionLocalMetadata( |
577 | 250 | const Function &F, const LocalAsMetadata *Local) { |
578 | 250 | EnumerateFunctionLocalMetadata(getMetadataFunctionID(&F), Local); |
579 | 250 | } |
580 | | |
581 | | void ValueEnumerator::dropFunctionFromMetadata( |
582 | 151 | MetadataMapType::value_type &FirstMD) { |
583 | 151 | SmallVector<const MDNode *, 64> Worklist; |
584 | 404 | auto push = [&Worklist](MetadataMapType::value_type &MD) { |
585 | 404 | auto &Entry = MD.second; |
586 | 404 | |
587 | 404 | // Nothing to do if this metadata isn't tagged. |
588 | 404 | if (!Entry.F) |
589 | 110 | return; |
590 | 294 | |
591 | 294 | // Drop the function tag. |
592 | 294 | Entry.F = 0; |
593 | 294 | |
594 | 294 | // If this is has an ID and is an MDNode, then its operands have entries as |
595 | 294 | // well. We need to drop the function from them too. |
596 | 294 | if (Entry.ID) |
597 | 294 | if (auto *294 N294 = dyn_cast<MDNode>(MD.first)) |
598 | 147 | Worklist.push_back(N); |
599 | 404 | }; |
600 | 151 | push(FirstMD); |
601 | 298 | while (!Worklist.empty()) |
602 | 147 | for (const Metadata *Op : Worklist.pop_back_val()->operands()) 147 { |
603 | 451 | if (!Op) |
604 | 198 | continue; |
605 | 253 | auto MD = MetadataMap.find(Op); |
606 | 253 | if (MD != MetadataMap.end()) |
607 | 253 | push(*MD); |
608 | 147 | } |
609 | 151 | } |
610 | | |
611 | 7.61k | void ValueEnumerator::EnumerateMetadata(unsigned F, const Metadata *MD) { |
612 | 7.61k | // It's vital for reader efficiency that uniqued subgraphs are done in |
613 | 7.61k | // post-order; it's expensive when their operands have forward references. |
614 | 7.61k | // If a distinct node is referenced from a uniqued node, it'll be delayed |
615 | 7.61k | // until the uniqued subgraph has been completely traversed. |
616 | 7.61k | SmallVector<const MDNode *, 32> DelayedDistinctNodes; |
617 | 7.61k | |
618 | 7.61k | // Start by enumerating MD, and then work through its transitive operands in |
619 | 7.61k | // post-order. This requires a depth-first search. |
620 | 7.61k | SmallVector<std::pair<const MDNode *, MDNode::op_iterator>, 32> Worklist; |
621 | 7.61k | if (const MDNode *N = enumerateMetadataImpl(F, MD)) |
622 | 4.62k | Worklist.push_back(std::make_pair(N, N->op_begin())); |
623 | 7.61k | |
624 | 18.8k | while (!Worklist.empty()18.8k ) { |
625 | 11.2k | const MDNode *N = Worklist.back().first; |
626 | 11.2k | |
627 | 11.2k | // Enumerate operands until we hit a new node. We need to traverse these |
628 | 11.2k | // nodes' operands before visiting the rest of N's operands. |
629 | 11.2k | MDNode::op_iterator I = std::find_if( |
630 | 11.2k | Worklist.back().second, N->op_end(), |
631 | 29.6k | [&](const Metadata *MD) { return enumerateMetadataImpl(F, MD); }); |
632 | 11.2k | if (I != N->op_end()11.2k ) { |
633 | 3.30k | auto *Op = cast<MDNode>(*I); |
634 | 3.30k | Worklist.back().second = ++I; |
635 | 3.30k | |
636 | 3.30k | // Delay traversing Op if it's a distinct node and N is uniqued. |
637 | 3.30k | if (Op->isDistinct() && 3.30k !N->isDistinct()245 ) |
638 | 162 | DelayedDistinctNodes.push_back(Op); |
639 | 3.30k | else |
640 | 3.14k | Worklist.push_back(std::make_pair(Op, Op->op_begin())); |
641 | 3.30k | continue; |
642 | 3.30k | } |
643 | 7.92k | |
644 | 7.92k | // All the operands have been visited. Now assign an ID. |
645 | 7.92k | Worklist.pop_back(); |
646 | 7.92k | MDs.push_back(N); |
647 | 7.92k | MetadataMap[N].ID = MDs.size(); |
648 | 7.92k | |
649 | 7.92k | // Flush out any delayed distinct nodes; these are all the distinct nodes |
650 | 7.92k | // that are leaves in last uniqued subgraph. |
651 | 7.92k | if (Worklist.empty() || 7.92k Worklist.back().first->isDistinct()3.25k ) { |
652 | 5.77k | for (const MDNode *N : DelayedDistinctNodes) |
653 | 162 | Worklist.push_back(std::make_pair(N, N->op_begin())); |
654 | 5.77k | DelayedDistinctNodes.clear(); |
655 | 5.77k | } |
656 | 11.2k | } |
657 | 7.61k | } |
658 | | |
659 | 37.2k | const MDNode *ValueEnumerator::enumerateMetadataImpl(unsigned F, const Metadata *MD) { |
660 | 37.2k | if (!MD) |
661 | 9.65k | return nullptr; |
662 | 27.5k | |
663 | 37.2k | assert( |
664 | 27.5k | (isa<MDNode>(MD) || isa<MDString>(MD) || isa<ConstantAsMetadata>(MD)) && |
665 | 27.5k | "Invalid metadata kind"); |
666 | 27.5k | |
667 | 27.5k | auto Insertion = MetadataMap.insert(std::make_pair(MD, MDIndex(F))); |
668 | 27.5k | MDIndex &Entry = Insertion.first->second; |
669 | 27.5k | if (!Insertion.second27.5k ) { |
670 | 10.8k | // Already mapped. If F doesn't match the function tag, drop it. |
671 | 10.8k | if (Entry.hasDifferentFunction(F)) |
672 | 151 | dropFunctionFromMetadata(*Insertion.first); |
673 | 10.8k | return nullptr; |
674 | 10.8k | } |
675 | 16.7k | |
676 | 16.7k | // Don't assign IDs to metadata nodes. |
677 | 16.7k | if (auto *16.7k N16.7k = dyn_cast<MDNode>(MD)) |
678 | 7.92k | return N; |
679 | 8.79k | |
680 | 8.79k | // Save the metadata. |
681 | 8.79k | MDs.push_back(MD); |
682 | 8.79k | Entry.ID = MDs.size(); |
683 | 8.79k | |
684 | 8.79k | // Enumerate the constant, if any. |
685 | 8.79k | if (auto *C = dyn_cast<ConstantAsMetadata>(MD)) |
686 | 3.76k | EnumerateValue(C->getValue()); |
687 | 37.2k | |
688 | 37.2k | return nullptr; |
689 | 37.2k | } |
690 | | |
691 | | /// EnumerateFunctionLocalMetadataa - Incorporate function-local metadata |
692 | | /// information reachable from the metadata. |
693 | | void ValueEnumerator::EnumerateFunctionLocalMetadata( |
694 | 250 | unsigned F, const LocalAsMetadata *Local) { |
695 | 250 | assert(F && "Expected a function"); |
696 | 250 | |
697 | 250 | // Check to see if it's already in! |
698 | 250 | MDIndex &Index = MetadataMap[Local]; |
699 | 250 | if (Index.ID250 ) { |
700 | 3 | assert(Index.F == F && "Expected the same function"); |
701 | 3 | return; |
702 | 3 | } |
703 | 247 | |
704 | 247 | MDs.push_back(Local); |
705 | 247 | Index.F = F; |
706 | 247 | Index.ID = MDs.size(); |
707 | 247 | |
708 | 247 | EnumerateValue(Local->getValue()); |
709 | 247 | } |
710 | | |
711 | 153k | static unsigned getMetadataTypeOrder(const Metadata *MD) { |
712 | 153k | // Strings are emitted in bulk and must come first. |
713 | 153k | if (isa<MDString>(MD)) |
714 | 39.4k | return 0; |
715 | 113k | |
716 | 113k | // ConstantAsMetadata doesn't reference anything. We may as well shuffle it |
717 | 113k | // to the front since we can detect it. |
718 | 113k | auto *N = dyn_cast<MDNode>(MD); |
719 | 113k | if (!N) |
720 | 43.4k | return 1; |
721 | 70.3k | |
722 | 70.3k | // The reader is fast forward references for distinct node operands, but slow |
723 | 70.3k | // when uniqued operands are unresolved. |
724 | 70.3k | return N->isDistinct() ? 70.3k 211.8k : 358.4k ; |
725 | 153k | } |
726 | | |
727 | 3.23k | void ValueEnumerator::organizeMetadata() { |
728 | 3.23k | assert(MetadataMap.size() == MDs.size() && |
729 | 3.23k | "Metadata map and vector out of sync"); |
730 | 3.23k | |
731 | 3.23k | if (MDs.empty()) |
732 | 2.36k | return; |
733 | 871 | |
734 | 871 | // Copy out the index information from MetadataMap in order to choose a new |
735 | 871 | // order. |
736 | 871 | SmallVector<MDIndex, 64> Order; |
737 | 871 | Order.reserve(MetadataMap.size()); |
738 | 871 | for (const Metadata *MD : MDs) |
739 | 16.7k | Order.push_back(MetadataMap.lookup(MD)); |
740 | 871 | |
741 | 871 | // Partition: |
742 | 871 | // - by function, then |
743 | 871 | // - by isa<MDString> |
744 | 871 | // and then sort by the original/current ID. Since the IDs are guaranteed to |
745 | 871 | // be unique, the result of std::sort will be deterministic. There's no need |
746 | 871 | // for std::stable_sort. |
747 | 76.5k | std::sort(Order.begin(), Order.end(), [this](MDIndex LHS, MDIndex RHS) { |
748 | 76.5k | return std::make_tuple(LHS.F, getMetadataTypeOrder(LHS.get(MDs)), LHS.ID) < |
749 | 76.5k | std::make_tuple(RHS.F, getMetadataTypeOrder(RHS.get(MDs)), RHS.ID); |
750 | 76.5k | }); |
751 | 871 | |
752 | 871 | // Rebuild MDs, index the metadata ranges for each function in FunctionMDs, |
753 | 871 | // and fix up MetadataMap. |
754 | 871 | std::vector<const Metadata *> OldMDs = std::move(MDs); |
755 | 871 | MDs.reserve(OldMDs.size()); |
756 | 13.9k | for (unsigned I = 0, E = Order.size(); I != E && 13.9k !Order[I].F13.4k ; ++I13.1k ) { |
757 | 13.1k | auto *MD = Order[I].get(OldMDs); |
758 | 13.1k | MDs.push_back(MD); |
759 | 13.1k | MetadataMap[MD].ID = I + 1; |
760 | 13.1k | if (isa<MDString>(MD)) |
761 | 3.83k | ++NumMDStrings; |
762 | 13.1k | } |
763 | 871 | |
764 | 871 | // Return early if there's nothing for the functions. |
765 | 871 | if (MDs.size() == Order.size()) |
766 | 518 | return; |
767 | 353 | |
768 | 353 | // Build the function metadata ranges. |
769 | 353 | MDRange R; |
770 | 353 | FunctionMDs.reserve(OldMDs.size()); |
771 | 353 | unsigned PrevF = 0; |
772 | 3.97k | for (unsigned I = MDs.size(), E = Order.size(), ID = MDs.size(); I != E; |
773 | 3.61k | ++I3.61k ) { |
774 | 3.61k | unsigned F = Order[I].F; |
775 | 3.61k | if (!PrevF3.61k ) { |
776 | 353 | PrevF = F; |
777 | 3.61k | } else if (3.26k PrevF != F3.26k ) { |
778 | 77 | R.Last = FunctionMDs.size(); |
779 | 77 | std::swap(R, FunctionMDInfo[PrevF]); |
780 | 77 | R.First = FunctionMDs.size(); |
781 | 77 | |
782 | 77 | ID = MDs.size(); |
783 | 77 | PrevF = F; |
784 | 77 | } |
785 | 3.61k | |
786 | 3.61k | auto *MD = Order[I].get(OldMDs); |
787 | 3.61k | FunctionMDs.push_back(MD); |
788 | 3.61k | MetadataMap[MD].ID = ++ID; |
789 | 3.61k | if (isa<MDString>(MD)) |
790 | 1.20k | ++R.NumStrings; |
791 | 3.61k | } |
792 | 3.23k | R.Last = FunctionMDs.size(); |
793 | 3.23k | FunctionMDInfo[PrevF] = R; |
794 | 3.23k | } |
795 | | |
796 | 6.44k | void ValueEnumerator::incorporateFunctionMetadata(const Function &F) { |
797 | 6.44k | NumModuleMDs = MDs.size(); |
798 | 6.44k | |
799 | 6.44k | auto R = FunctionMDInfo.lookup(getValueID(&F) + 1); |
800 | 6.44k | NumMDStrings = R.NumStrings; |
801 | 6.44k | MDs.insert(MDs.end(), FunctionMDs.begin() + R.First, |
802 | 6.44k | FunctionMDs.begin() + R.Last); |
803 | 6.44k | } |
804 | | |
805 | 104k | void ValueEnumerator::EnumerateValue(const Value *V) { |
806 | 104k | assert(!V->getType()->isVoidTy() && "Can't insert void values!"); |
807 | 104k | assert(!isa<MetadataAsValue>(V) && "EnumerateValue doesn't handle Metadata!"); |
808 | 104k | |
809 | 104k | // Check to see if it's already in! |
810 | 104k | unsigned &ValueID = ValueMap[V]; |
811 | 104k | if (ValueID104k ) { |
812 | 38.6k | // Increment use count. |
813 | 38.6k | Values[ValueID-1].second++; |
814 | 38.6k | return; |
815 | 38.6k | } |
816 | 66.1k | |
817 | 66.1k | if (auto *66.1k GO66.1k = dyn_cast<GlobalObject>(V)) |
818 | 13.5k | if (const Comdat *13.5k C13.5k = GO->getComdat()) |
819 | 717 | Comdats.insert(C); |
820 | 66.1k | |
821 | 66.1k | // Enumerate the type of this value. |
822 | 66.1k | EnumerateType(V->getType()); |
823 | 66.1k | |
824 | 66.1k | if (const Constant *C66.1k = dyn_cast<Constant>(V)) { |
825 | 32.7k | if (isa<GlobalValue>(C)32.7k ) { |
826 | 14.2k | // Initializers for globals are handled explicitly elsewhere. |
827 | 32.7k | } else if (18.5k C->getNumOperands()18.5k ) { |
828 | 3.91k | // If a constant has operands, enumerate them. This makes sure that if a |
829 | 3.91k | // constant has uses (for example an array of const ints), that they are |
830 | 3.91k | // inserted also. |
831 | 3.91k | |
832 | 3.91k | // We prefer to enumerate them with values before we enumerate the user |
833 | 3.91k | // itself. This makes it more likely that we can avoid forward references |
834 | 3.91k | // in the reader. We know that there can be no cycles in the constants |
835 | 3.91k | // graph that don't go through a global variable. |
836 | 3.91k | for (User::const_op_iterator I = C->op_begin(), E = C->op_end(); |
837 | 13.4k | I != E13.4k ; ++I9.56k ) |
838 | 9.56k | if (9.56k !isa<BasicBlock>(*I)9.56k ) // Don't enumerate BB operand to BlockAddress. |
839 | 9.49k | EnumerateValue(*I); |
840 | 18.5k | |
841 | 18.5k | // Finally, add the value. Doing this could make the ValueID reference be |
842 | 18.5k | // dangling, don't reuse it. |
843 | 18.5k | Values.push_back(std::make_pair(V, 1U)); |
844 | 18.5k | ValueMap[V] = Values.size(); |
845 | 18.5k | return; |
846 | 18.5k | } |
847 | 62.1k | } |
848 | 62.1k | |
849 | 62.1k | // Add the value. |
850 | 62.1k | Values.push_back(std::make_pair(V, 1U)); |
851 | 62.1k | ValueID = Values.size(); |
852 | 62.1k | } |
853 | | |
854 | | |
855 | 252k | void ValueEnumerator::EnumerateType(Type *Ty) { |
856 | 252k | unsigned *TypeID = &TypeMap[Ty]; |
857 | 252k | |
858 | 252k | // We've already seen this type. |
859 | 252k | if (*TypeID) |
860 | 219k | return; |
861 | 32.9k | |
862 | 32.9k | // If it is a non-anonymous struct, mark the type as being visited so that we |
863 | 32.9k | // don't recursively visit it. This is safe because we allow forward |
864 | 32.9k | // references of these in the bitcode reader. |
865 | 32.9k | if (StructType *32.9k STy32.9k = dyn_cast<StructType>(Ty)) |
866 | 1.98k | if (1.98k !STy->isLiteral()1.98k ) |
867 | 1.53k | *TypeID = ~0U; |
868 | 32.9k | |
869 | 32.9k | // Enumerate all of the subtypes before we enumerate this type. This ensures |
870 | 32.9k | // that the type will be enumerated in an order that can be directly built. |
871 | 32.9k | for (Type *SubTy : Ty->subtypes()) |
872 | 35.4k | EnumerateType(SubTy); |
873 | 32.9k | |
874 | 32.9k | // Refresh the TypeID pointer in case the table rehashed. |
875 | 32.9k | TypeID = &TypeMap[Ty]; |
876 | 32.9k | |
877 | 32.9k | // Check to see if we got the pointer another way. This can happen when |
878 | 32.9k | // enumerating recursive types that hit the base case deeper than they start. |
879 | 32.9k | // |
880 | 32.9k | // If this is actually a struct that we are treating as forward ref'able, |
881 | 32.9k | // then emit the definition now that all of its contents are available. |
882 | 32.9k | if (*TypeID && 32.9k *TypeID != ~0U1.71k ) |
883 | 179 | return; |
884 | 32.7k | |
885 | 32.7k | // Add this type now that its contents are all happily enumerated. |
886 | 32.7k | Types.push_back(Ty); |
887 | 32.7k | |
888 | 32.7k | *TypeID = Types.size(); |
889 | 32.7k | } |
890 | | |
891 | | // Enumerate the types for the specified value. If the value is a constant, |
892 | | // walk through it, enumerating the types of the constant. |
893 | 88.1k | void ValueEnumerator::EnumerateOperandType(const Value *V) { |
894 | 88.1k | EnumerateType(V->getType()); |
895 | 88.1k | |
896 | 88.1k | assert(!isa<MetadataAsValue>(V) && "Unexpected metadata operand"); |
897 | 88.1k | |
898 | 88.1k | const Constant *C = dyn_cast<Constant>(V); |
899 | 88.1k | if (!C) |
900 | 50.2k | return; |
901 | 37.9k | |
902 | 37.9k | // If this constant is already enumerated, ignore it, we know its type must |
903 | 37.9k | // be enumerated. |
904 | 37.9k | if (37.9k ValueMap.count(C)37.9k ) |
905 | 23.7k | return; |
906 | 14.1k | |
907 | 14.1k | // This constant may have operands, make sure to enumerate the types in |
908 | 14.1k | // them. |
909 | 14.1k | for (const Value *Op : C->operands()) 14.1k { |
910 | 3.73k | // Don't enumerate basic blocks here, this happens as operands to |
911 | 3.73k | // blockaddress. |
912 | 3.73k | if (isa<BasicBlock>(Op)) |
913 | 54 | continue; |
914 | 3.67k | |
915 | 3.67k | EnumerateOperandType(Op); |
916 | 3.67k | } |
917 | 88.1k | } |
918 | | |
919 | 22.8k | void ValueEnumerator::EnumerateAttributes(AttributeList PAL) { |
920 | 22.8k | if (PAL.isEmpty()22.8k ) return16.2k ; // null is always 0. |
921 | 6.55k | |
922 | 6.55k | // Do a lookup. |
923 | 6.55k | unsigned &Entry = AttributeListMap[PAL]; |
924 | 6.55k | if (Entry == 06.55k ) { |
925 | 1.99k | // Never saw this before, add it. |
926 | 1.99k | AttributeLists.push_back(PAL); |
927 | 1.99k | Entry = AttributeLists.size(); |
928 | 1.99k | } |
929 | 6.55k | |
930 | 6.55k | // Do lookups for all attribute groups. |
931 | 17.5k | for (unsigned i = PAL.index_begin(), e = PAL.index_end(); i != e17.5k ; ++i11.0k ) { |
932 | 11.0k | AttributeSet AS = PAL.getAttributes(i); |
933 | 11.0k | if (!AS.hasAttributes()) |
934 | 2.35k | continue; |
935 | 8.68k | IndexAndAttrSet Pair = {i, AS}; |
936 | 8.68k | unsigned &Entry = AttributeGroupMap[Pair]; |
937 | 8.68k | if (Entry == 08.68k ) { |
938 | 2.28k | AttributeGroups.push_back(Pair); |
939 | 2.28k | Entry = AttributeGroups.size(); |
940 | 2.28k | } |
941 | 11.0k | } |
942 | 22.8k | } |
943 | | |
944 | 6.44k | void ValueEnumerator::incorporateFunction(const Function &F) { |
945 | 6.44k | InstructionCount = 0; |
946 | 6.44k | NumModuleValues = Values.size(); |
947 | 6.44k | |
948 | 6.44k | // Add global metadata to the function block. This doesn't include |
949 | 6.44k | // LocalAsMetadata. |
950 | 6.44k | incorporateFunctionMetadata(F); |
951 | 6.44k | |
952 | 6.44k | // Adding function arguments to the value table. |
953 | 6.44k | for (const auto &I : F.args()) |
954 | 5.08k | EnumerateValue(&I); |
955 | 6.44k | |
956 | 6.44k | FirstFuncConstantID = Values.size(); |
957 | 6.44k | |
958 | 6.44k | // Add all function-level constants to the value table. |
959 | 10.6k | for (const BasicBlock &BB : F) { |
960 | 10.6k | for (const Instruction &I : BB) |
961 | 51.9k | for (const Use &OI : I.operands()) 51.9k { |
962 | 85.3k | if ((isa<Constant>(OI) && 85.3k !isa<GlobalValue>(OI)34.2k ) || isa<InlineAsm>(OI)60.4k ) |
963 | 24.9k | EnumerateValue(OI); |
964 | 51.9k | } |
965 | 10.6k | BasicBlocks.push_back(&BB); |
966 | 10.6k | ValueMap[&BB] = BasicBlocks.size(); |
967 | 10.6k | } |
968 | 6.44k | |
969 | 6.44k | // Optimize the constant layout. |
970 | 6.44k | OptimizeConstants(FirstFuncConstantID, Values.size()); |
971 | 6.44k | |
972 | 6.44k | // Add the function's parameter attributes so they are available for use in |
973 | 6.44k | // the function's instruction. |
974 | 6.44k | EnumerateAttributes(F.getAttributes()); |
975 | 6.44k | |
976 | 6.44k | FirstInstID = Values.size(); |
977 | 6.44k | |
978 | 6.44k | SmallVector<LocalAsMetadata *, 8> FnLocalMDVector; |
979 | 6.44k | // Add all of the instructions. |
980 | 10.6k | for (const BasicBlock &BB : F) { |
981 | 51.9k | for (const Instruction &I : BB) { |
982 | 85.3k | for (const Use &OI : I.operands()) { |
983 | 85.3k | if (auto *MD = dyn_cast<MetadataAsValue>(&OI)) |
984 | 814 | if (auto *814 Local814 = dyn_cast<LocalAsMetadata>(MD->getMetadata())) |
985 | 814 | // Enumerate metadata after the instructions they might refer to. |
986 | 250 | FnLocalMDVector.push_back(Local); |
987 | 85.3k | } |
988 | 51.9k | |
989 | 51.9k | if (!I.getType()->isVoidTy()) |
990 | 28.1k | EnumerateValue(&I); |
991 | 51.9k | } |
992 | 10.6k | } |
993 | 6.44k | |
994 | 6.44k | // Add all of the function-local metadata. |
995 | 6.69k | for (unsigned i = 0, e = FnLocalMDVector.size(); i != e6.69k ; ++i250 ) { |
996 | 250 | // At this point, every local values have been incorporated, we shouldn't |
997 | 250 | // have a metadata operand that references a value that hasn't been seen. |
998 | 250 | assert(ValueMap.count(FnLocalMDVector[i]->getValue()) && |
999 | 250 | "Missing value for metadata operand"); |
1000 | 250 | EnumerateFunctionLocalMetadata(F, FnLocalMDVector[i]); |
1001 | 250 | } |
1002 | 6.44k | } |
1003 | | |
1004 | 6.44k | void ValueEnumerator::purgeFunction() { |
1005 | 6.44k | /// Remove purged values from the ValueMap. |
1006 | 50.0k | for (unsigned i = NumModuleValues, e = Values.size(); i != e50.0k ; ++i43.5k ) |
1007 | 43.5k | ValueMap.erase(Values[i].first); |
1008 | 10.2k | for (unsigned i = NumModuleMDs, e = MDs.size(); i != e10.2k ; ++i3.84k ) |
1009 | 3.84k | MetadataMap.erase(MDs[i]); |
1010 | 17.0k | for (unsigned i = 0, e = BasicBlocks.size(); i != e17.0k ; ++i10.6k ) |
1011 | 10.6k | ValueMap.erase(BasicBlocks[i]); |
1012 | 6.44k | |
1013 | 6.44k | Values.resize(NumModuleValues); |
1014 | 6.44k | MDs.resize(NumModuleMDs); |
1015 | 6.44k | BasicBlocks.clear(); |
1016 | 6.44k | NumMDStrings = 0; |
1017 | 6.44k | } |
1018 | | |
1019 | | static void IncorporateFunctionInfoGlobalBBIDs(const Function *F, |
1020 | 50 | DenseMap<const BasicBlock*, unsigned> &IDMap) { |
1021 | 50 | unsigned Counter = 0; |
1022 | 50 | for (const BasicBlock &BB : *F) |
1023 | 185 | IDMap[&BB] = ++Counter; |
1024 | 50 | } |
1025 | | |
1026 | | /// getGlobalBasicBlockID - This returns the function-specific ID for the |
1027 | | /// specified basic block. This is relatively expensive information, so it |
1028 | | /// should only be used by rare constructs such as address-of-label. |
1029 | 125 | unsigned ValueEnumerator::getGlobalBasicBlockID(const BasicBlock *BB) const { |
1030 | 125 | unsigned &Idx = GlobalBasicBlockIDs[BB]; |
1031 | 125 | if (Idx != 0) |
1032 | 75 | return Idx-1; |
1033 | 50 | |
1034 | 50 | IncorporateFunctionInfoGlobalBBIDs(BB->getParent(), GlobalBasicBlockIDs); |
1035 | 50 | return getGlobalBasicBlockID(BB); |
1036 | 50 | } |
1037 | | |
1038 | 16.1k | uint64_t ValueEnumerator::computeBitsRequiredForTypeIndicies() const { |
1039 | 16.1k | return Log2_32_Ceil(getTypes().size() + 1); |
1040 | 16.1k | } |