Coverage Report

Created: 2019-07-24 05:18

/Users/buildslave/jenkins/workspace/clang-stage2-coverage-R/llvm/lib/Bitcode/Writer/ValueEnumerator.cpp
Line
Count
Source (jump to first uncovered line)
1
//===- ValueEnumerator.cpp - Number values and types for bitcode writer ---===//
2
//
3
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
4
// See https://llvm.org/LICENSE.txt for license information.
5
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
6
//
7
//===----------------------------------------------------------------------===//
8
//
9
// This file implements the ValueEnumerator class.
10
//
11
//===----------------------------------------------------------------------===//
12
13
#include "ValueEnumerator.h"
14
#include "llvm/ADT/DenseMap.h"
15
#include "llvm/ADT/SmallVector.h"
16
#include "llvm/Config/llvm-config.h"
17
#include "llvm/IR/Argument.h"
18
#include "llvm/IR/Attributes.h"
19
#include "llvm/IR/BasicBlock.h"
20
#include "llvm/IR/Constant.h"
21
#include "llvm/IR/DebugInfoMetadata.h"
22
#include "llvm/IR/DerivedTypes.h"
23
#include "llvm/IR/Function.h"
24
#include "llvm/IR/GlobalAlias.h"
25
#include "llvm/IR/GlobalIFunc.h"
26
#include "llvm/IR/GlobalObject.h"
27
#include "llvm/IR/GlobalValue.h"
28
#include "llvm/IR/GlobalVariable.h"
29
#include "llvm/IR/Instruction.h"
30
#include "llvm/IR/Instructions.h"
31
#include "llvm/IR/Metadata.h"
32
#include "llvm/IR/Module.h"
33
#include "llvm/IR/Type.h"
34
#include "llvm/IR/Use.h"
35
#include "llvm/IR/UseListOrder.h"
36
#include "llvm/IR/User.h"
37
#include "llvm/IR/Value.h"
38
#include "llvm/IR/ValueSymbolTable.h"
39
#include "llvm/Support/Casting.h"
40
#include "llvm/Support/Compiler.h"
41
#include "llvm/Support/Debug.h"
42
#include "llvm/Support/MathExtras.h"
43
#include "llvm/Support/raw_ostream.h"
44
#include <algorithm>
45
#include <cassert>
46
#include <cstddef>
47
#include <iterator>
48
#include <tuple>
49
#include <utility>
50
#include <vector>
51
52
using namespace llvm;
53
54
namespace {
55
56
struct OrderMap {
57
  DenseMap<const Value *, std::pair<unsigned, bool>> IDs;
58
  unsigned LastGlobalConstantID = 0;
59
  unsigned LastGlobalValueID = 0;
60
61
2.86k
  OrderMap() = default;
62
63
6.54k
  bool isGlobalConstant(unsigned ID) const {
64
6.54k
    return ID <= LastGlobalConstantID;
65
6.54k
  }
66
67
96.5k
  bool isGlobalValue(unsigned ID) const {
68
96.5k
    return ID <= LastGlobalValueID && 
!isGlobalConstant(ID)6.54k
;
69
96.5k
  }
70
71
5.72k
  unsigned size() const { return IDs.size(); }
72
74.9k
  std::pair<unsigned, bool> &operator[](const Value *V) { return IDs[V]; }
73
74
268k
  std::pair<unsigned, bool> lookup(const Value *V) const {
75
268k
    return IDs.lookup(V);
76
268k
  }
77
78
58.3k
  void index(const Value *V) {
79
58.3k
    // Explicitly sequence get-size and insert-value operations to avoid UB.
80
58.3k
    unsigned ID = IDs.size() + 1;
81
58.3k
    IDs[V].first = ID;
82
58.3k
  }
83
};
84
85
} // end anonymous namespace
86
87
64.7k
static void orderValue(const Value *V, OrderMap &OM) {
88
64.7k
  if (OM.lookup(V).first)
89
6.35k
    return;
90
58.3k
91
58.3k
  if (const Constant *C = dyn_cast<Constant>(V))
92
19.6k
    if (C->getNumOperands() && 
!isa<GlobalValue>(C)4.09k
)
93
1.41k
      for (const Value *Op : C->operands())
94
3.08k
        if (!isa<BasicBlock>(Op) && 
!isa<GlobalValue>(Op)3.03k
)
95
2.14k
          orderValue(Op, OM);
96
58.3k
97
58.3k
  // Note: we cannot cache this lookup above, since inserting into the map
98
58.3k
  // changes the map's size, and thus affects the other IDs.
99
58.3k
  OM.index(V);
100
58.3k
}
101
102
2.86k
static OrderMap orderModule(const Module &M) {
103
2.86k
  // This needs to match the order used by ValueEnumerator::ValueEnumerator()
104
2.86k
  // and ValueEnumerator::incorporateFunction().
105
2.86k
  OrderMap OM;
106
2.86k
107
2.86k
  // In the reader, initializers of GlobalValues are set *after* all the
108
2.86k
  // globals have been read.  Rather than awkwardly modeling this behaviour
109
2.86k
  // directly in predictValueUseListOrderImpl(), just assign IDs to
110
2.86k
  // initializers of GlobalValues before GlobalValues themselves to model this
111
2.86k
  // implicitly.
112
2.86k
  for (const GlobalVariable &G : M.globals())
113
2.37k
    if (G.hasInitializer())
114
1.96k
      if (!isa<GlobalValue>(G.getInitializer()))
115
1.76k
        orderValue(G.getInitializer(), OM);
116
2.86k
  for (const GlobalAlias &A : M.aliases())
117
564
    if (!isa<GlobalValue>(A.getAliasee()))
118
277
      orderValue(A.getAliasee(), OM);
119
2.86k
  for (const GlobalIFunc &I : M.ifuncs())
120
35
    if (!isa<GlobalValue>(I.getResolver()))
121
0
      orderValue(I.getResolver(), OM);
122
6.75k
  for (const Function &F : M) {
123
6.75k
    for (const Use &U : F.operands())
124
339
      if (!isa<GlobalValue>(U.get()))
125
290
        orderValue(U.get(), OM);
126
6.75k
  }
127
2.86k
  OM.LastGlobalConstantID = OM.size();
128
2.86k
129
2.86k
  // Initializers of GlobalValues are processed in
130
2.86k
  // BitcodeReader::ResolveGlobalAndAliasInits().  Match the order there rather
131
2.86k
  // than ValueEnumerator, and match the code in predictValueUseListOrderImpl()
132
2.86k
  // by giving IDs in reverse order.
133
2.86k
  //
134
2.86k
  // Since GlobalValues never reference each other directly (just through
135
2.86k
  // initializers), their relative IDs only matter for determining order of
136
2.86k
  // uses in their initializers.
137
2.86k
  for (const Function &F : M)
138
6.75k
    orderValue(&F, OM);
139
2.86k
  for (const GlobalAlias &A : M.aliases())
140
564
    orderValue(&A, OM);
141
2.86k
  for (const GlobalIFunc &I : M.ifuncs())
142
35
    orderValue(&I, OM);
143
2.86k
  for (const GlobalVariable &G : M.globals())
144
2.37k
    orderValue(&G, OM);
145
2.86k
  OM.LastGlobalValueID = OM.size();
146
2.86k
147
6.75k
  for (const Function &F : M) {
148
6.75k
    if (F.isDeclaration())
149
1.88k
      continue;
150
4.87k
    // Here we need to match the union of ValueEnumerator::incorporateFunction()
151
4.87k
    // and WriteFunction().  Basic blocks are implicitly declared before
152
4.87k
    // anything else (by declaring their size).
153
4.87k
    for (const BasicBlock &BB : F)
154
8.23k
      orderValue(&BB, OM);
155
4.87k
    for (const Argument &A : F.args())
156
3.18k
      orderValue(&A, OM);
157
4.87k
    for (const BasicBlock &BB : F)
158
8.23k
      for (const Instruction &I : BB)
159
27.2k
        for (const Value *Op : I.operands())
160
43.7k
          if ((isa<Constant>(*Op) && 
!isa<GlobalValue>(*Op)17.6k
) ||
161
43.7k
              
isa<InlineAsm>(*Op)31.9k
)
162
11.8k
            orderValue(Op, OM);
163
4.87k
    for (const BasicBlock &BB : F)
164
8.23k
      for (const Instruction &I : BB)
165
27.2k
        orderValue(&I, OM);
166
4.87k
  }
167
2.86k
  return OM;
168
2.86k
}
169
170
static void predictValueUseListOrderImpl(const Value *V, const Function *F,
171
                                         unsigned ID, const OrderMap &OM,
172
6.63k
                                         UseListOrderStack &Stack) {
173
6.63k
  // Predict use-list order for this one.
174
6.63k
  using Entry = std::pair<const Use *, unsigned>;
175
6.63k
  SmallVector<Entry, 64> List;
176
6.63k
  for (const Use &U : V->uses())
177
27.4k
    // Check if this user will be serialized.
178
27.4k
    if (OM.lookup(U.getUser()).first)
179
27.0k
      List.push_back(std::make_pair(&U, List.size()));
180
6.63k
181
6.63k
  if (List.size() < 2)
182
289
    // We may have lost some users.
183
289
    return;
184
6.34k
185
6.34k
  bool IsGlobalValue = OM.isGlobalValue(ID);
186
90.2k
  llvm::sort(List, [&](const Entry &L, const Entry &R) {
187
90.2k
    const Use *LU = L.first;
188
90.2k
    const Use *RU = R.first;
189
90.2k
    if (LU == RU)
190
2.35k
      return false;
191
87.9k
192
87.9k
    auto LID = OM.lookup(LU->getUser()).first;
193
87.9k
    auto RID = OM.lookup(RU->getUser()).first;
194
87.9k
195
87.9k
    // Global values are processed in reverse order.
196
87.9k
    //
197
87.9k
    // Moreover, initializers of GlobalValues are set *after* all the globals
198
87.9k
    // have been read (despite having earlier IDs).  Rather than awkwardly
199
87.9k
    // modeling this behaviour here, orderModule() has assigned IDs to
200
87.9k
    // initializers of GlobalValues before GlobalValues themselves.
201
87.9k
    if (OM.isGlobalValue(LID) && 
OM.isGlobalValue(RID)2.22k
)
202
2.05k
      return LID < RID;
203
85.8k
204
85.8k
    // If ID is 4, then expect: 7 6 5 1 2 3.
205
85.8k
    if (LID < RID) {
206
56.5k
      if (RID <= ID)
207
160
        if (!IsGlobalValue) // GlobalValue uses don't get reversed.
208
8
          return true;
209
56.5k
      return false;
210
56.5k
    }
211
29.2k
    if (RID < LID) {
212
27.9k
      if (LID <= ID)
213
113
        if (!IsGlobalValue) // GlobalValue uses don't get reversed.
214
35
          return false;
215
27.9k
      return true;
216
27.9k
    }
217
1.31k
218
1.31k
    // LID and RID are equal, so we have different operands of the same user.
219
1.31k
    // Assume operands are added in order for all instructions.
220
1.31k
    if (LID <= ID)
221
50
      if (!IsGlobalValue) // GlobalValue uses don't get reversed.
222
12
        return LU->getOperandNo() < RU->getOperandNo();
223
1.29k
    return LU->getOperandNo() > RU->getOperandNo();
224
1.29k
  });
225
6.34k
226
6.34k
  if (std::is_sorted(
227
6.34k
          List.begin(), List.end(),
228
14.2k
          [](const Entry &L, const Entry &R) { return L.second < R.second; }))
229
4.84k
    // Order is already correct.
230
4.84k
    return;
231
1.49k
232
1.49k
  // Store the shuffle.
233
1.49k
  Stack.emplace_back(V, F, List.size());
234
1.49k
  assert(List.size() == Stack.back().Shuffle.size() && "Wrong size");
235
11.1k
  for (size_t I = 0, E = List.size(); I != E; 
++I9.64k
)
236
9.64k
    Stack.back().Shuffle[I] = List[I].second;
237
1.49k
}
238
239
static void predictValueUseListOrder(const Value *V, const Function *F,
240
74.9k
                                     OrderMap &OM, UseListOrderStack &Stack) {
241
74.9k
  auto &IDPair = OM[V];
242
74.9k
  assert(IDPair.first && "Unmapped value");
243
74.9k
  if (IDPair.second)
244
16.5k
    // Already predicted.
245
16.5k
    return;
246
58.3k
247
58.3k
  // Do the actual prediction.
248
58.3k
  IDPair.second = true;
249
58.3k
  if (!V->use_empty() && 
std::next(V->use_begin()) != V->use_end()28.8k
)
250
6.63k
    predictValueUseListOrderImpl(V, F, IDPair.first, OM, Stack);
251
58.3k
252
58.3k
  // Recursive descent into constants.
253
58.3k
  if (const Constant *C = dyn_cast<Constant>(V))
254
19.6k
    if (C->getNumOperands()) // Visit GlobalValues.
255
4.09k
      for (const Value *Op : C->operands())
256
5.98k
        if (isa<Constant>(Op)) // Visit GlobalValues.
257
5.94k
          predictValueUseListOrder(Op, F, OM, Stack);
258
58.3k
}
259
260
2.86k
static UseListOrderStack predictUseListOrder(const Module &M) {
261
2.86k
  OrderMap OM = orderModule(M);
262
2.86k
263
2.86k
  // Use-list orders need to be serialized after all the users have been added
264
2.86k
  // to a value, or else the shuffles will be incomplete.  Store them per
265
2.86k
  // function in a stack.
266
2.86k
  //
267
2.86k
  // Aside from function order, the order of values doesn't matter much here.
268
2.86k
  UseListOrderStack Stack;
269
2.86k
270
2.86k
  // We want to visit the functions backward now so we can list function-local
271
2.86k
  // constants in the last Function they're used in.  Module-level constants
272
2.86k
  // have already been visited above.
273
9.62k
  for (auto I = M.rbegin(), E = M.rend(); I != E; 
++I6.75k
) {
274
6.75k
    const Function &F = *I;
275
6.75k
    if (F.isDeclaration())
276
1.88k
      continue;
277
4.87k
    for (const BasicBlock &BB : F)
278
8.23k
      predictValueUseListOrder(&BB, &F, OM, Stack);
279
4.87k
    for (const Argument &A : F.args())
280
3.18k
      predictValueUseListOrder(&A, &F, OM, Stack);
281
4.87k
    for (const BasicBlock &BB : F)
282
8.23k
      for (const Instruction &I : BB)
283
27.2k
        for (const Value *Op : I.operands())
284
43.7k
          if (isa<Constant>(*Op) || 
isa<InlineAsm>(*Op)26.1k
) // Visit GlobalValues.
285
17.6k
            predictValueUseListOrder(Op, &F, OM, Stack);
286
4.87k
    for (const BasicBlock &BB : F)
287
8.23k
      for (const Instruction &I : BB)
288
27.2k
        predictValueUseListOrder(&I, &F, OM, Stack);
289
4.87k
  }
290
2.86k
291
2.86k
  // Visit globals last, since the module-level use-list block will be seen
292
2.86k
  // before the function bodies are processed.
293
2.86k
  for (const GlobalVariable &G : M.globals())
294
2.37k
    predictValueUseListOrder(&G, nullptr, OM, Stack);
295
2.86k
  for (const Function &F : M)
296
6.75k
    predictValueUseListOrder(&F, nullptr, OM, Stack);
297
2.86k
  for (const GlobalAlias &A : M.aliases())
298
564
    predictValueUseListOrder(&A, nullptr, OM, Stack);
299
2.86k
  for (const GlobalIFunc &I : M.ifuncs())
300
35
    predictValueUseListOrder(&I, nullptr, OM, Stack);
301
2.86k
  for (const GlobalVariable &G : M.globals())
302
2.37k
    if (G.hasInitializer())
303
1.96k
      predictValueUseListOrder(G.getInitializer(), nullptr, OM, Stack);
304
2.86k
  for (const GlobalAlias &A : M.aliases())
305
564
    predictValueUseListOrder(A.getAliasee(), nullptr, OM, Stack);
306
2.86k
  for (const GlobalIFunc &I : M.ifuncs())
307
35
    predictValueUseListOrder(I.getResolver(), nullptr, OM, Stack);
308
6.75k
  for (const Function &F : M) {
309
6.75k
    for (const Use &U : F.operands())
310
339
      predictValueUseListOrder(U.get(), nullptr, OM, Stack);
311
6.75k
  }
312
2.86k
313
2.86k
  return Stack;
314
2.86k
}
315
316
22.9k
static bool isIntOrIntVectorValue(const std::pair<const Value*, unsigned> &V) {
317
22.9k
  return V.first->getType()->isIntOrIntVectorTy();
318
22.9k
}
319
320
ValueEnumerator::ValueEnumerator(const Module &M,
321
                                 bool ShouldPreserveUseListOrder)
322
4.98k
    : ShouldPreserveUseListOrder(ShouldPreserveUseListOrder) {
323
4.98k
  if (ShouldPreserveUseListOrder)
324
2.86k
    UseListOrders = predictUseListOrder(M);
325
4.98k
326
4.98k
  // Enumerate the global variables.
327
4.98k
  for (const GlobalVariable &GV : M.globals())
328
10.8k
    EnumerateValue(&GV);
329
4.98k
330
4.98k
  // Enumerate the functions.
331
17.7k
  for (const Function & F : M) {
332
17.7k
    EnumerateValue(&F);
333
17.7k
    EnumerateAttributes(F.getAttributes());
334
17.7k
  }
335
4.98k
336
4.98k
  // Enumerate the aliases.
337
4.98k
  for (const GlobalAlias &GA : M.aliases())
338
741
    EnumerateValue(&GA);
339
4.98k
340
4.98k
  // Enumerate the ifuncs.
341
4.98k
  for (const GlobalIFunc &GIF : M.ifuncs())
342
35
    EnumerateValue(&GIF);
343
4.98k
344
4.98k
  // Remember what is the cutoff between globalvalue's and other constants.
345
4.98k
  unsigned FirstConstant = Values.size();
346
4.98k
347
4.98k
  // Enumerate the global variable initializers and attributes.
348
10.8k
  for (const GlobalVariable &GV : M.globals()) {
349
10.8k
    if (GV.hasInitializer())
350
9.05k
      EnumerateValue(GV.getInitializer());
351
10.8k
    if (GV.hasAttributes())
352
75
      EnumerateAttributes(GV.getAttributesAsList(AttributeList::FunctionIndex));
353
10.8k
  }
354
4.98k
355
4.98k
  // Enumerate the aliasees.
356
4.98k
  for (const GlobalAlias &GA : M.aliases())
357
741
    EnumerateValue(GA.getAliasee());
358
4.98k
359
4.98k
  // Enumerate the ifunc resolvers.
360
4.98k
  for (const GlobalIFunc &GIF : M.ifuncs())
361
35
    EnumerateValue(GIF.getResolver());
362
4.98k
363
4.98k
  // Enumerate any optional Function data.
364
4.98k
  for (const Function &F : M)
365
17.7k
    for (const Use &U : F.operands())
366
438
      EnumerateValue(U.get());
367
4.98k
368
4.98k
  // Enumerate the metadata type.
369
4.98k
  //
370
4.98k
  // TODO: Move this to ValueEnumerator::EnumerateOperandType() once bitcode
371
4.98k
  // only encodes the metadata type when it's used as a value.
372
4.98k
  EnumerateType(Type::getMetadataTy(M.getContext()));
373
4.98k
374
4.98k
  // Insert constants and metadata that are named at module level into the slot
375
4.98k
  // pool so that the module symbol table can refer to them...
376
4.98k
  EnumerateValueSymbolTable(M.getValueSymbolTable());
377
4.98k
  EnumerateNamedMetadata(M);
378
4.98k
379
4.98k
  SmallVector<std::pair<unsigned, MDNode *>, 8> MDs;
380
10.8k
  for (const GlobalVariable &GV : M.globals()) {
381
10.8k
    MDs.clear();
382
10.8k
    GV.getAllMetadata(MDs);
383
10.8k
    for (const auto &I : MDs)
384
245
      // FIXME: Pass GV to EnumerateMetadata and arrange for the bitcode writer
385
245
      // to write metadata to the global variable's own metadata block
386
245
      // (PR28134).
387
245
      EnumerateMetadata(nullptr, I.second);
388
10.8k
  }
389
4.98k
390
4.98k
  // Enumerate types used by function bodies and argument lists.
391
17.7k
  for (const Function &F : M) {
392
17.7k
    for (const Argument &A : F.args())
393
20.9k
      EnumerateType(A.getType());
394
17.7k
395
17.7k
    // Enumerate metadata attached to this function.
396
17.7k
    MDs.clear();
397
17.7k
    F.getAllMetadata(MDs);
398
17.7k
    for (const auto &I : MDs)
399
868
      EnumerateMetadata(F.isDeclaration() ? 
nullptr208
:
&F660
, I.second);
400
17.7k
401
17.7k
    for (const BasicBlock &BB : F)
402
181k
      
for (const Instruction &I : BB)30.2k
{
403
302k
        for (const Use &Op : I.operands()) {
404
302k
          auto *MD = dyn_cast<MetadataAsValue>(&Op);
405
302k
          if (!MD) {
406
301k
            EnumerateOperandType(Op);
407
301k
            continue;
408
301k
          }
409
971
410
971
          // Local metadata is enumerated during function-incorporation.
411
971
          if (isa<LocalAsMetadata>(MD->getMetadata()))
412
259
            continue;
413
712
414
712
          EnumerateMetadata(&F, MD->getMetadata());
415
712
        }
416
181k
        EnumerateType(I.getType());
417
181k
        if (const auto *Call = dyn_cast<CallBase>(&I))
418
17.5k
          EnumerateAttributes(Call->getAttributes());
419
181k
420
181k
        // Enumerate metadata attached with this instruction.
421
181k
        MDs.clear();
422
181k
        I.getAllMetadataOtherThanDebugLoc(MDs);
423
187k
        for (unsigned i = 0, e = MDs.size(); i != e; 
++i6.14k
)
424
6.14k
          EnumerateMetadata(&F, MDs[i].second);
425
181k
426
181k
        // Don't enumerate the location directly -- it has a special record
427
181k
        // type -- but enumerate its operands.
428
181k
        if (DILocation *L = I.getDebugLoc())
429
2.35k
          for (const Metadata *Op : L->operands())
430
2.38k
            EnumerateMetadata(&F, Op);
431
181k
      }
432
17.7k
  }
433
4.98k
434
4.98k
  // Optimize constant ordering.
435
4.98k
  OptimizeConstants(FirstConstant, Values.size());
436
4.98k
437
4.98k
  // Organize metadata ordering.
438
4.98k
  organizeMetadata();
439
4.98k
}
440
441
6.05k
unsigned ValueEnumerator::getInstructionID(const Instruction *Inst) const {
442
6.05k
  InstructionMapType::const_iterator I = InstructionMap.find(Inst);
443
6.05k
  assert(I != InstructionMap.end() && "Instruction is not mapped!");
444
6.05k
  return I->second;
445
6.05k
}
446
447
2.81k
unsigned ValueEnumerator::getComdatID(const Comdat *C) const {
448
2.81k
  unsigned ComdatID = Comdats.idFor(C);
449
2.81k
  assert(ComdatID && "Comdat not found!");
450
2.81k
  return ComdatID;
451
2.81k
}
452
453
181k
void ValueEnumerator::setInstructionID(const Instruction *I) {
454
181k
  InstructionMap[I] = InstructionCount++;
455
181k
}
456
457
485k
unsigned ValueEnumerator::getValueID(const Value *V) const {
458
485k
  if (auto *MD = dyn_cast<MetadataAsValue>(V))
459
971
    return getMetadataID(MD->getMetadata());
460
484k
461
484k
  ValueMapType::const_iterator I = ValueMap.find(V);
462
484k
  assert(I != ValueMap.end() && "Value not in slotcalculator!");
463
484k
  return I->second-1;
464
484k
}
465
466
#if !defined(NDEBUG) || defined(LLVM_ENABLE_DUMP)
467
LLVM_DUMP_METHOD void ValueEnumerator::dump() const {
468
  print(dbgs(), ValueMap, "Default");
469
  dbgs() << '\n';
470
  print(dbgs(), MetadataMap, "MetaData");
471
  dbgs() << '\n';
472
}
473
#endif
474
475
void ValueEnumerator::print(raw_ostream &OS, const ValueMapType &Map,
476
0
                            const char *Name) const {
477
0
  OS << "Map Name: " << Name << "\n";
478
0
  OS << "Size: " << Map.size() << "\n";
479
0
  for (ValueMapType::const_iterator I = Map.begin(),
480
0
         E = Map.end(); I != E; ++I) {
481
0
    const Value *V = I->first;
482
0
    if (V->hasName())
483
0
      OS << "Value: " << V->getName();
484
0
    else
485
0
      OS << "Value: [null]\n";
486
0
    V->print(errs());
487
0
    errs() << '\n';
488
0
489
0
    OS << " Uses(" << V->getNumUses() << "):";
490
0
    for (const Use &U : V->uses()) {
491
0
      if (&U != &*V->use_begin())
492
0
        OS << ",";
493
0
      if(U->hasName())
494
0
        OS << " " << U->getName();
495
0
      else
496
0
        OS << " [null]";
497
0
498
0
    }
499
0
    OS <<  "\n\n";
500
0
  }
501
0
}
502
503
void ValueEnumerator::print(raw_ostream &OS, const MetadataMapType &Map,
504
0
                            const char *Name) const {
505
0
  OS << "Map Name: " << Name << "\n";
506
0
  OS << "Size: " << Map.size() << "\n";
507
0
  for (auto I = Map.begin(), E = Map.end(); I != E; ++I) {
508
0
    const Metadata *MD = I->first;
509
0
    OS << "Metadata: slot = " << I->second.ID << "\n";
510
0
    OS << "Metadata: function = " << I->second.F << "\n";
511
0
    MD->print(OS);
512
0
    OS << "\n";
513
0
  }
514
0
}
515
516
/// OptimizeConstants - Reorder constant pool for denser encoding.
517
17.6k
void ValueEnumerator::OptimizeConstants(unsigned CstStart, unsigned CstEnd) {
518
17.6k
  if (CstStart == CstEnd || 
CstStart+1 == CstEnd8.97k
)
return12.7k
;
519
4.94k
520
4.94k
  if (ShouldPreserveUseListOrder)
521
1.87k
    // Optimizing constants makes the use-list order difficult to predict.
522
1.87k
    // Disable it for now when trying to preserve the order.
523
1.87k
    return;
524
3.06k
525
3.06k
  std::stable_sort(Values.begin() + CstStart, Values.begin() + CstEnd,
526
3.06k
                   [this](const std::pair<const Value *, unsigned> &LHS,
527
62.9k
                          const std::pair<const Value *, unsigned> &RHS) {
528
62.9k
    // Sort by plane.
529
62.9k
    if (LHS.first->getType() != RHS.first->getType())
530
39.8k
      return getTypeID(LHS.first->getType()) < getTypeID(RHS.first->getType());
531
23.1k
    // Then by frequency.
532
23.1k
    return LHS.second > RHS.second;
533
23.1k
  });
534
3.06k
535
3.06k
  // Ensure that integer and vector of integer constants are at the start of the
536
3.06k
  // constant pool.  This is important so that GEP structure indices come before
537
3.06k
  // gep constant exprs.
538
3.06k
  std::stable_partition(Values.begin() + CstStart, Values.begin() + CstEnd,
539
3.06k
                        isIntOrIntVectorValue);
540
3.06k
541
3.06k
  // Rebuild the modified portion of ValueMap.
542
26.0k
  for (; CstStart != CstEnd; 
++CstStart22.9k
)
543
22.9k
    ValueMap[Values[CstStart].first] = CstStart+1;
544
3.06k
}
545
546
/// EnumerateValueSymbolTable - Insert all of the values in the specified symbol
547
/// table into the values table.
548
4.98k
void ValueEnumerator::EnumerateValueSymbolTable(const ValueSymbolTable &VST) {
549
4.98k
  for (ValueSymbolTable::const_iterator VI = VST.begin(), VE = VST.end();
550
33.8k
       VI != VE; 
++VI28.8k
)
551
28.8k
    EnumerateValue(VI->getValue());
552
4.98k
}
553
554
/// Insert all of the values referenced by named metadata in the specified
555
/// module.
556
4.98k
void ValueEnumerator::EnumerateNamedMetadata(const Module &M) {
557
4.98k
  for (const auto &I : M.named_metadata())
558
3.04k
    EnumerateNamedMDNode(&I);
559
4.98k
}
560
561
3.04k
void ValueEnumerator::EnumerateNamedMDNode(const NamedMDNode *MD) {
562
9.69k
  for (unsigned i = 0, e = MD->getNumOperands(); i != e; 
++i6.64k
)
563
6.64k
    EnumerateMetadata(nullptr, MD->getOperand(i));
564
3.04k
}
565
566
17.2k
unsigned ValueEnumerator::getMetadataFunctionID(const Function *F) const {
567
17.2k
  return F ? 
getValueID(F) + 110.1k
:
07.10k
;
568
17.2k
}
569
570
16.9k
void ValueEnumerator::EnumerateMetadata(const Function *F, const Metadata *MD) {
571
16.9k
  EnumerateMetadata(getMetadataFunctionID(F), MD);
572
16.9k
}
573
574
void ValueEnumerator::EnumerateFunctionLocalMetadata(
575
259
    const Function &F, const LocalAsMetadata *Local) {
576
259
  EnumerateFunctionLocalMetadata(getMetadataFunctionID(&F), Local);
577
259
}
578
579
void ValueEnumerator::dropFunctionFromMetadata(
580
415
    MetadataMapType::value_type &FirstMD) {
581
415
  SmallVector<const MDNode *, 64> Worklist;
582
1.12k
  auto push = [&Worklist](MetadataMapType::value_type &MD) {
583
1.12k
    auto &Entry = MD.second;
584
1.12k
585
1.12k
    // Nothing to do if this metadata isn't tagged.
586
1.12k
    if (!Entry.F)
587
250
      return;
588
871
589
871
    // Drop the function tag.
590
871
    Entry.F = 0;
591
871
592
871
    // If this is has an ID and is an MDNode, then its operands have entries as
593
871
    // well.  We need to drop the function from them too.
594
871
    if (Entry.ID)
595
871
      if (auto *N = dyn_cast<MDNode>(MD.first))
596
387
        Worklist.push_back(N);
597
871
  };
598
415
  push(FirstMD);
599
802
  while (!Worklist.empty())
600
1.13k
    
for (const Metadata *Op : Worklist.pop_back_val()->operands())387
{
601
1.13k
      if (!Op)
602
424
        continue;
603
706
      auto MD = MetadataMap.find(Op);
604
706
      if (MD != MetadataMap.end())
605
706
        push(*MD);
606
706
    }
607
415
}
608
609
16.9k
void ValueEnumerator::EnumerateMetadata(unsigned F, const Metadata *MD) {
610
16.9k
  // It's vital for reader efficiency that uniqued subgraphs are done in
611
16.9k
  // post-order; it's expensive when their operands have forward references.
612
16.9k
  // If a distinct node is referenced from a uniqued node, it'll be delayed
613
16.9k
  // until the uniqued subgraph has been completely traversed.
614
16.9k
  SmallVector<const MDNode *, 32> DelayedDistinctNodes;
615
16.9k
616
16.9k
  // Start by enumerating MD, and then work through its transitive operands in
617
16.9k
  // post-order.  This requires a depth-first search.
618
16.9k
  SmallVector<std::pair<const MDNode *, MDNode::op_iterator>, 32> Worklist;
619
16.9k
  if (const MDNode *N = enumerateMetadataImpl(F, MD))
620
9.46k
    Worklist.push_back(std::make_pair(N, N->op_begin()));
621
16.9k
622
42.1k
  while (!Worklist.empty()) {
623
25.2k
    const MDNode *N = Worklist.back().first;
624
25.2k
625
25.2k
    // Enumerate operands until we hit a new node.  We need to traverse these
626
25.2k
    // nodes' operands before visiting the rest of N's operands.
627
25.2k
    MDNode::op_iterator I = std::find_if(
628
25.2k
        Worklist.back().second, N->op_end(),
629
61.5k
        [&](const Metadata *MD) { return enumerateMetadataImpl(F, MD); });
630
25.2k
    if (I != N->op_end()) {
631
7.86k
      auto *Op = cast<MDNode>(*I);
632
7.86k
      Worklist.back().second = ++I;
633
7.86k
634
7.86k
      // Delay traversing Op if it's a distinct node and N is uniqued.
635
7.86k
      if (Op->isDistinct() && 
!N->isDistinct()847
)
636
610
        DelayedDistinctNodes.push_back(Op);
637
7.25k
      else
638
7.25k
        Worklist.push_back(std::make_pair(Op, Op->op_begin()));
639
7.86k
      continue;
640
7.86k
    }
641
17.3k
642
17.3k
    // All the operands have been visited.  Now assign an ID.
643
17.3k
    Worklist.pop_back();
644
17.3k
    MDs.push_back(N);
645
17.3k
    MetadataMap[N].ID = MDs.size();
646
17.3k
647
17.3k
    // Flush out any delayed distinct nodes; these are all the distinct nodes
648
17.3k
    // that are leaves in last uniqued subgraph.
649
17.3k
    if (Worklist.empty() || 
Worklist.back().first->isDistinct()7.71k
) {
650
12.4k
      for (const MDNode *N : DelayedDistinctNodes)
651
610
        Worklist.push_back(std::make_pair(N, N->op_begin()));
652
12.4k
      DelayedDistinctNodes.clear();
653
12.4k
    }
654
17.3k
  }
655
16.9k
}
656
657
78.5k
const MDNode *ValueEnumerator::enumerateMetadataImpl(unsigned F, const Metadata *MD) {
658
78.5k
  if (!MD)
659
14.6k
    return nullptr;
660
63.8k
661
63.8k
  assert(
662
63.8k
      (isa<MDNode>(MD) || isa<MDString>(MD) || isa<ConstantAsMetadata>(MD)) &&
663
63.8k
      "Invalid metadata kind");
664
63.8k
665
63.8k
  auto Insertion = MetadataMap.insert(std::make_pair(MD, MDIndex(F)));
666
63.8k
  MDIndex &Entry = Insertion.first->second;
667
63.8k
  if (!Insertion.second) {
668
26.8k
    // Already mapped.  If F doesn't match the function tag, drop it.
669
26.8k
    if (Entry.hasDifferentFunction(F))
670
415
      dropFunctionFromMetadata(*Insertion.first);
671
26.8k
    return nullptr;
672
26.8k
  }
673
37.0k
674
37.0k
  // Don't assign IDs to metadata nodes.
675
37.0k
  if (auto *N = dyn_cast<MDNode>(MD))
676
17.3k
    return N;
677
19.7k
678
19.7k
  // Save the metadata.
679
19.7k
  MDs.push_back(MD);
680
19.7k
  Entry.ID = MDs.size();
681
19.7k
682
19.7k
  // Enumerate the constant, if any.
683
19.7k
  if (auto *C = dyn_cast<ConstantAsMetadata>(MD))
684
9.62k
    EnumerateValue(C->getValue());
685
19.7k
686
19.7k
  return nullptr;
687
19.7k
}
688
689
/// EnumerateFunctionLocalMetadata - Incorporate function-local metadata
690
/// information reachable from the metadata.
691
void ValueEnumerator::EnumerateFunctionLocalMetadata(
692
259
    unsigned F, const LocalAsMetadata *Local) {
693
259
  assert(F && "Expected a function");
694
259
695
259
  // Check to see if it's already in!
696
259
  MDIndex &Index = MetadataMap[Local];
697
259
  if (Index.ID) {
698
3
    assert(Index.F == F && "Expected the same function");
699
3
    return;
700
3
  }
701
256
702
256
  MDs.push_back(Local);
703
256
  Index.F = F;
704
256
  Index.ID = MDs.size();
705
256
706
256
  EnumerateValue(Local->getValue());
707
256
}
708
709
367k
static unsigned getMetadataTypeOrder(const Metadata *MD) {
710
367k
  // Strings are emitted in bulk and must come first.
711
367k
  if (isa<MDString>(MD))
712
84.3k
    return 0;
713
283k
714
283k
  // ConstantAsMetadata doesn't reference anything.  We may as well shuffle it
715
283k
  // to the front since we can detect it.
716
283k
  auto *N = dyn_cast<MDNode>(MD);
717
283k
  if (!N)
718
118k
    return 1;
719
165k
720
165k
  // The reader is fast forward references for distinct node operands, but slow
721
165k
  // when uniqued operands are unresolved.
722
165k
  return N->isDistinct() ? 
230.5k
:
3134k
;
723
165k
}
724
725
4.98k
/// Choose a final order for all enumerated metadata, splitting it into the
/// module-level list (kept in MDs) and per-function lists (moved to
/// FunctionMDs, with each function's sub-range recorded in FunctionMDInfo).
/// MetadataMap IDs are rewritten to the new 1-based positions.
void ValueEnumerator::organizeMetadata() {
  assert(MetadataMap.size() == MDs.size() &&
         "Metadata map and vector out of sync");

  if (MDs.empty())
    return;

  // Copy out the index information from MetadataMap in order to choose a new
  // order.
  SmallVector<MDIndex, 64> Order;
  Order.reserve(MetadataMap.size());
  for (const Metadata *MD : MDs)
    Order.push_back(MetadataMap.lookup(MD));

  // Partition:
  //   - by function, then
  //   - by isa<MDString>
  // and then sort by the original/current ID.  Since the IDs are guaranteed to
  // be unique, the result of std::sort will be deterministic.  There's no need
  // for std::stable_sort.
  llvm::sort(Order, [this](MDIndex LHS, MDIndex RHS) {
    return std::make_tuple(LHS.F, getMetadataTypeOrder(LHS.get(MDs)), LHS.ID) <
           std::make_tuple(RHS.F, getMetadataTypeOrder(RHS.get(MDs)), RHS.ID);
  });

  // Rebuild MDs, index the metadata ranges for each function in FunctionMDs,
  // and fix up MetadataMap.
  std::vector<const Metadata *> OldMDs;
  MDs.swap(OldMDs);
  MDs.reserve(OldMDs.size());
  // Module-level metadata sorted first (F == 0); stop at the first entry that
  // belongs to a function.
  for (unsigned I = 0, E = Order.size(); I != E && !Order[I].F; ++I) {
    auto *MD = Order[I].get(OldMDs);
    MDs.push_back(MD);
    MetadataMap[MD].ID = I + 1;
    if (isa<MDString>(MD))
      ++NumMDStrings;
  }

  // Return early if there's nothing for the functions.
  if (MDs.size() == Order.size())
    return;

  // Build the function metadata ranges.  R accumulates the range for the
  // current function; it is flushed into FunctionMDInfo whenever the function
  // index changes (Order is sorted by F, so each function is contiguous).
  MDRange R;
  FunctionMDs.reserve(OldMDs.size());
  unsigned PrevF = 0;
  // ID restarts at MDs.size() per function so function-local IDs continue
  // directly after the module-level ones.
  for (unsigned I = MDs.size(), E = Order.size(), ID = MDs.size(); I != E;
       ++I) {
    unsigned F = Order[I].F;
    if (!PrevF) {
      PrevF = F;
    } else if (PrevF != F) {
      R.Last = FunctionMDs.size();
      std::swap(R, FunctionMDInfo[PrevF]);
      R.First = FunctionMDs.size();

      ID = MDs.size();
      PrevF = F;
    }

    auto *MD = Order[I].get(OldMDs);
    FunctionMDs.push_back(MD);
    MetadataMap[MD].ID = ++ID;
    if (isa<MDString>(MD))
      ++R.NumStrings;
  }
  // Flush the range for the final function.
  R.Last = FunctionMDs.size();
  FunctionMDInfo[PrevF] = R;
}
794
795
12.7k
void ValueEnumerator::incorporateFunctionMetadata(const Function &F) {
796
12.7k
  NumModuleMDs = MDs.size();
797
12.7k
798
12.7k
  auto R = FunctionMDInfo.lookup(getValueID(&F) + 1);
799
12.7k
  NumMDStrings = R.NumStrings;
800
12.7k
  MDs.insert(MDs.end(), FunctionMDs.begin() + R.First,
801
12.7k
             FunctionMDs.begin() + R.Last);
802
12.7k
}
803
804
310k
/// Assign a 1-based ID to V — enumerating its type, comdat, and (for
/// constants) its operands first — or bump its use count if it already has
/// one.  Metadata and void values are handled elsewhere, as the asserts show.
void ValueEnumerator::EnumerateValue(const Value *V) {
  assert(!V->getType()->isVoidTy() && "Can't insert void values!");
  assert(!isa<MetadataAsValue>(V) && "EnumerateValue doesn't handle Metadata!");

  // Check to see if it's already in!
  unsigned &ValueID = ValueMap[V];
  if (ValueID) {
    // Increment use count.
    Values[ValueID-1].second++;
    return;
  }

  // Record the comdat of any global object so it gets emitted.
  if (auto *GO = dyn_cast<GlobalObject>(V))
    if (const Comdat *C = GO->getComdat())
      Comdats.insert(C);

  // Enumerate the type of this value.
  EnumerateType(V->getType());

  if (const Constant *C = dyn_cast<Constant>(V)) {
    if (isa<GlobalValue>(C)) {
      // Initializers for globals are handled explicitly elsewhere.
    } else if (C->getNumOperands()) {
      // If a constant has operands, enumerate them.  This makes sure that if a
      // constant has uses (for example an array of const ints), that they are
      // inserted also.

      // We prefer to enumerate them with values before we enumerate the user
      // itself.  This makes it more likely that we can avoid forward references
      // in the reader.  We know that there can be no cycles in the constants
      // graph that don't go through a global variable.
      for (User::const_op_iterator I = C->op_begin(), E = C->op_end();
           I != E; ++I)
        if (!isa<BasicBlock>(*I)) // Don't enumerate BB operand to BlockAddress.
          EnumerateValue(*I);

      // Finally, add the value.  Doing this could make the ValueID reference be
      // dangling, don't reuse it.  (The recursive calls above may have grown
      // ValueMap, invalidating the reference taken at the top.)
      Values.push_back(std::make_pair(V, 1U));
      ValueMap[V] = Values.size();
      return;
    }
  }

  // Add the value.  No recursion happened on this path, so the ValueID
  // reference is still valid here.
  Values.push_back(std::make_pair(V, 1U));
  ValueID = Values.size();
}
852
853
854
771k
/// Assign a 1-based ID to Ty in post-order (subtypes first), so the reader can
/// build each type directly.  Named structs use the sentinel ~0U to mark
/// "currently being visited" and break recursive-type cycles.
void ValueEnumerator::EnumerateType(Type *Ty) {
  unsigned *TypeID = &TypeMap[Ty];

  // We've already seen this type.
  if (*TypeID)
    return;

  // If it is a non-anonymous struct, mark the type as being visited so that we
  // don't recursively visit it.  This is safe because we allow forward
  // references of these in the bitcode reader.
  if (StructType *STy = dyn_cast<StructType>(Ty))
    if (!STy->isLiteral())
      *TypeID = ~0U;

  // Enumerate all of the subtypes before we enumerate this type.  This ensures
  // that the type will be enumerated in an order that can be directly built.
  for (Type *SubTy : Ty->subtypes())
    EnumerateType(SubTy);

  // Refresh the TypeID pointer in case the table rehashed.
  TypeID = &TypeMap[Ty];

  // Check to see if we got the pointer another way.  This can happen when
  // enumerating recursive types that hit the base case deeper than they start.
  //
  // If this is actually a struct that we are treating as forward ref'able,
  // then emit the definition now that all of its contents are available.
  if (*TypeID && *TypeID != ~0U)
    return;

  // Add this type now that its contents are all happily enumerated.
  Types.push_back(Ty);

  // Overwrites the ~0U sentinel (if any) with the real 1-based ID.
  *TypeID = Types.size();
}
889
890
// Enumerate the types for the specified value.  If the value is a constant,
891
// walk through it, enumerating the types of the constant.
892
310k
void ValueEnumerator::EnumerateOperandType(const Value *V) {
893
310k
  EnumerateType(V->getType());
894
310k
895
310k
  assert(!isa<MetadataAsValue>(V) && "Unexpected metadata operand");
896
310k
897
310k
  const Constant *C = dyn_cast<Constant>(V);
898
310k
  if (!C)
899
187k
    return;
900
123k
901
123k
  // If this constant is already enumerated, ignore it, we know its type must
902
123k
  // be enumerated.
903
123k
  if (ValueMap.count(C))
904
97.1k
    return;
905
26.6k
906
26.6k
  // This constant may have operands, make sure to enumerate the types in
907
26.6k
  // them.
908
26.6k
  for (const Value *Op : C->operands()) {
909
9.23k
    // Don't enumerate basic blocks here, this happens as operands to
910
9.23k
    // blockaddress.
911
9.23k
    if (isa<BasicBlock>(Op))
912
58
      continue;
913
9.17k
914
9.17k
    EnumerateOperandType(Op);
915
9.17k
  }
916
26.6k
}
917
918
48.1k
/// Assign IDs to an attribute list and to each of its non-empty
/// (index, attribute-set) groups, if they haven't been seen before.
void ValueEnumerator::EnumerateAttributes(AttributeList PAL) {
  // The empty list is always encoded as 0; it never needs an entry.
  if (PAL.isEmpty())
    return;

  // Assign the list itself a 1-based ID on first sight.
  unsigned &ListID = AttributeListMap[PAL];
  if (!ListID) {
    AttributeLists.push_back(PAL);
    ListID = AttributeLists.size();
  }

  // Likewise assign IDs to every non-empty attribute group in the list.
  for (unsigned Index = PAL.index_begin(), End = PAL.index_end(); Index != End;
       ++Index) {
    AttributeSet AS = PAL.getAttributes(Index);
    if (!AS.hasAttributes())
      continue;
    IndexAndAttrSet Group = {Index, AS};
    unsigned &GroupID = AttributeGroupMap[Group];
    if (!GroupID) {
      AttributeGroups.push_back(Group);
      GroupID = AttributeGroups.size();
    }
  }
}
942
943
12.7k
/// Extend the module-level tables with everything needed to emit the body of
/// F: its metadata range, arguments, function-level constants, basic blocks,
/// instruction results, and function-local metadata — in that order.  The
/// additions are rolled back by purgeFunction().
void ValueEnumerator::incorporateFunction(const Function &F) {
  InstructionCount = 0;
  // Remember where module-level values end so purgeFunction() can trim back.
  NumModuleValues = Values.size();

  // Add global metadata to the function block.  This doesn't include
  // LocalAsMetadata.
  incorporateFunctionMetadata(F);

  // Adding function arguments to the value table.
  for (const auto &I : F.args()) {
    EnumerateValue(&I);
    // byval carries a pointee type that isn't reachable via the argument's
    // own type, so enumerate it explicitly.
    if (I.hasAttribute(Attribute::ByVal))
      EnumerateType(I.getParamByValType());
  }
  FirstFuncConstantID = Values.size();

  // Add all function-level constants to the value table.
  for (const BasicBlock &BB : F) {
    for (const Instruction &I : BB)
      for (const Use &OI : I.operands()) {
        if ((isa<Constant>(OI) && !isa<GlobalValue>(OI)) || isa<InlineAsm>(OI))
          EnumerateValue(OI);
      }
    // Basic blocks get function-local IDs in a separate namespace (the
    // BasicBlocks vector), also keyed through ValueMap.
    BasicBlocks.push_back(&BB);
    ValueMap[&BB] = BasicBlocks.size();
  }

  // Optimize the constant layout.
  OptimizeConstants(FirstFuncConstantID, Values.size());

  // Add the function's parameter attributes so they are available for use in
  // the function's instructions.
  EnumerateAttributes(F.getAttributes());

  FirstInstID = Values.size();

  SmallVector<LocalAsMetadata *, 8> FnLocalMDVector;
  // Add all of the instructions.
  for (const BasicBlock &BB : F) {
    for (const Instruction &I : BB) {
      for (const Use &OI : I.operands()) {
        if (auto *MD = dyn_cast<MetadataAsValue>(&OI))
          if (auto *Local = dyn_cast<LocalAsMetadata>(MD->getMetadata()))
            // Enumerate metadata after the instructions they might refer to.
            FnLocalMDVector.push_back(Local);
      }

      // Only instructions that produce a value get IDs; void instructions
      // (stores, branches, ...) are not referenceable.
      if (!I.getType()->isVoidTy())
        EnumerateValue(&I);
    }
  }

  // Add all of the function-local metadata.
  for (unsigned i = 0, e = FnLocalMDVector.size(); i != e; ++i) {
    // At this point, all local values have been incorporated; we shouldn't
    // have a metadata operand that references a value that hasn't been seen.
    assert(ValueMap.count(FnLocalMDVector[i]->getValue()) &&
           "Missing value for metadata operand");
    EnumerateFunctionLocalMetadata(F, FnLocalMDVector[i]);
  }
}
1004
1005
12.7k
void ValueEnumerator::purgeFunction() {
1006
12.7k
  /// Remove purged values from the ValueMap.
1007
148k
  for (unsigned i = NumModuleValues, e = Values.size(); i != e; 
++i135k
)
1008
135k
    ValueMap.erase(Values[i].first);
1009
21.1k
  for (unsigned i = NumModuleMDs, e = MDs.size(); i != e; 
++i8.39k
)
1010
8.39k
    MetadataMap.erase(MDs[i]);
1011
42.9k
  for (unsigned i = 0, e = BasicBlocks.size(); i != e; 
++i30.2k
)
1012
30.2k
    ValueMap.erase(BasicBlocks[i]);
1013
12.7k
1014
12.7k
  Values.resize(NumModuleValues);
1015
12.7k
  MDs.resize(NumModuleMDs);
1016
12.7k
  BasicBlocks.clear();
1017
12.7k
  NumMDStrings = 0;
1018
12.7k
}
1019
1020
static void IncorporateFunctionInfoGlobalBBIDs(const Function *F,
1021
57
                                 DenseMap<const BasicBlock*, unsigned> &IDMap) {
1022
57
  unsigned Counter = 0;
1023
57
  for (const BasicBlock &BB : *F)
1024
207
    IDMap[&BB] = ++Counter;
1025
57
}
1026
1027
/// getGlobalBasicBlockID - This returns the function-specific ID for the
1028
/// specified basic block.  This is relatively expensive information, so it
1029
/// should only be used by rare constructs such as address-of-label.
1030
143
unsigned ValueEnumerator::getGlobalBasicBlockID(const BasicBlock *BB) const {
1031
143
  unsigned &Idx = GlobalBasicBlockIDs[BB];
1032
143
  if (Idx != 0)
1033
86
    return Idx-1;
1034
57
1035
57
  IncorporateFunctionInfoGlobalBBIDs(BB->getParent(), GlobalBasicBlockIDs);
1036
57
  return getGlobalBasicBlockID(BB);
1037
57
}
1038
1039
24.8k
uint64_t ValueEnumerator::computeBitsRequiredForTypeIndicies() const {
1040
24.8k
  return Log2_32_Ceil(getTypes().size() + 1);
1041
24.8k
}