Coverage Report

Created: 2019-07-24 05:18

/Users/buildslave/jenkins/workspace/clang-stage2-coverage-R/llvm/lib/Analysis/CGSCCPassManager.cpp
Line
Count
Source
1
//===- CGSCCPassManager.cpp - Managing & running CGSCC passes -------------===//
2
//
3
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
4
// See https://llvm.org/LICENSE.txt for license information.
5
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
6
//
7
//===----------------------------------------------------------------------===//
8
9
#include "llvm/Analysis/CGSCCPassManager.h"
10
#include "llvm/ADT/ArrayRef.h"
11
#include "llvm/ADT/Optional.h"
12
#include "llvm/ADT/STLExtras.h"
13
#include "llvm/ADT/SetVector.h"
14
#include "llvm/ADT/SmallPtrSet.h"
15
#include "llvm/ADT/SmallVector.h"
16
#include "llvm/ADT/iterator_range.h"
17
#include "llvm/Analysis/LazyCallGraph.h"
18
#include "llvm/IR/CallSite.h"
19
#include "llvm/IR/Constant.h"
20
#include "llvm/IR/InstIterator.h"
21
#include "llvm/IR/Instruction.h"
22
#include "llvm/IR/PassManager.h"
23
#include "llvm/Support/Casting.h"
24
#include "llvm/Support/Debug.h"
25
#include "llvm/Support/raw_ostream.h"
26
#include <algorithm>
27
#include <cassert>
28
#include <iterator>
29
30
#define DEBUG_TYPE "cgscc"
31
32
using namespace llvm;
33
34
// Explicit template instantiations and specialization definitions for core
35
// template typedefs.
36
namespace llvm {
37
38
// Explicit instantiations for the core proxy templates.
39
template class AllAnalysesOn<LazyCallGraph::SCC>;
40
template class AnalysisManager<LazyCallGraph::SCC, LazyCallGraph &>;
41
template class PassManager<LazyCallGraph::SCC, CGSCCAnalysisManager,
42
                           LazyCallGraph &, CGSCCUpdateResult &>;
43
template class InnerAnalysisManagerProxy<CGSCCAnalysisManager, Module>;
44
template class OuterAnalysisManagerProxy<ModuleAnalysisManager,
45
                                         LazyCallGraph::SCC, LazyCallGraph &>;
46
template class OuterAnalysisManagerProxy<CGSCCAnalysisManager, Function>;
47
48
/// Explicitly specialize the pass manager run method to handle call graph
49
/// updates.
50
template <>
51
PreservedAnalyses
52
PassManager<LazyCallGraph::SCC, CGSCCAnalysisManager, LazyCallGraph &,
53
            CGSCCUpdateResult &>::run(LazyCallGraph::SCC &InitialC,
54
                                      CGSCCAnalysisManager &AM,
55
2.25k
                                      LazyCallGraph &G, CGSCCUpdateResult &UR) {
56
2.25k
  // Request PassInstrumentation from the analysis manager; we will use it to run
57
2.25k
  // instrumenting callbacks for the passes later.
58
2.25k
  PassInstrumentation PI =
59
2.25k
      AM.getResult<PassInstrumentationAnalysis>(InitialC, G);
60
2.25k
61
2.25k
  PreservedAnalyses PA = PreservedAnalyses::all();
62
2.25k
63
2.25k
  if (DebugLogging)
64
190
    dbgs() << "Starting CGSCC pass manager run.\n";
65
2.25k
66
2.25k
  // The SCC may be refined while we are running passes over it, so set up
67
2.25k
  // a pointer that we can update.
68
2.25k
  LazyCallGraph::SCC *C = &InitialC;
69
2.25k
70
4.92k
  for (auto &Pass : Passes) {
71
4.92k
    if (DebugLogging)
72
426
      dbgs() << "Running pass: " << Pass->name() << " on " << *C << "\n";
73
4.92k
74
4.92k
    // Check the PassInstrumentation's BeforePass callbacks before running the
75
4.92k
    // pass, skip its execution completely if asked to (callback returns false).
76
4.92k
    if (!PI.runBeforePass(*Pass, *C))
77
1
      continue;
78
4.92k
79
4.92k
    PreservedAnalyses PassPA = Pass->run(*C, AM, G, UR);
80
4.92k
81
4.92k
    if (UR.InvalidatedSCCs.count(C))
82
19
      PI.runAfterPassInvalidated<LazyCallGraph::SCC>(*Pass);
83
4.90k
    else
84
4.90k
      PI.runAfterPass<LazyCallGraph::SCC>(*Pass, *C);
85
4.92k
86
4.92k
    // Update the SCC if necessary.
87
4.92k
    C = UR.UpdatedC ? UR.UpdatedC : C;    [region counts: UR.UpdatedC 51, C 4.87k]
88
4.92k
89
4.92k
    // If the CGSCC pass wasn't able to provide a valid updated SCC, the
90
4.92k
    // current SCC may simply need to be skipped if invalid.
91
4.92k
    if (UR.InvalidatedSCCs.count(C)) {
92
3
      LLVM_DEBUG(dbgs() << "Skipping invalidated root or island SCC!\n");
93
3
      break;
94
3
    }
95
4.92k
    // Check that we didn't miss any update scenario.
96
4.92k
    assert(C->begin() != C->end() && "Cannot have an empty SCC!");
97
4.92k
98
4.92k
    // Update the analysis manager as each pass runs and potentially
99
4.92k
    // invalidates analyses.
100
4.92k
    AM.invalidate(*C, PassPA);
101
4.92k
102
4.92k
    // Finally, we intersect the final preserved analyses to compute the
103
4.92k
    // aggregate preserved set for this pass manager.
104
4.92k
    PA.intersect(std::move(PassPA));
105
4.92k
106
4.92k
    // FIXME: Historically, the pass managers all called the LLVM context's
107
4.92k
    // yield function here. We don't have a generic way to acquire the
108
4.92k
    // context and it isn't yet clear what the right pattern is for yielding
109
4.92k
    // in the new pass manager so it is currently omitted.
110
4.92k
    // ...getContext().yield();
111
4.92k
  }
112
2.25k
113
2.25k
  // Before we mark all of *this* SCC's analyses as preserved below, intersect
114
2.25k
  // this with the cross-SCC preserved analysis set. This is used to allow
115
2.25k
  // CGSCC passes to mutate ancestor SCCs and still trigger proper invalidation
116
2.25k
  // for them.
117
2.25k
  UR.CrossSCCPA.intersect(PA);
118
2.25k
119
2.25k
  // Invalidation was handled after each pass in the above loop for the current
120
2.25k
  // SCC. Therefore, the remaining analysis results in the AnalysisManager are
121
2.25k
  // preserved. We mark this with a set so that we don't need to inspect each
122
2.25k
  // one individually.
123
2.25k
  PA.preserveSet<AllAnalysesOn<LazyCallGraph::SCC>>();
124
2.25k
125
2.25k
  if (DebugLogging)
126
190
    dbgs() << "Finished CGSCC pass manager run.\n";
127
2.25k
128
2.25k
  return PA;
129
2.25k
}
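The run() specialization listed above drives every pass added to a CGSCCPassManager. For orientation, here is a minimal sketch (not part of the file being measured) of a CGSCC pass written against that interface; the pass name is a placeholder and the body deliberately changes nothing.

#include "llvm/Analysis/CGSCCPassManager.h"
#include "llvm/Analysis/LazyCallGraph.h"
#include "llvm/IR/PassManager.h"

namespace {
// Hypothetical example pass; a real pass would inspect or transform the
// functions in C and report what it preserved.
struct NoOpCGSCCExamplePass : llvm::PassInfoMixin<NoOpCGSCCExamplePass> {
  llvm::PreservedAnalyses run(llvm::LazyCallGraph::SCC &C,
                              llvm::CGSCCAnalysisManager &AM,
                              llvm::LazyCallGraph &CG,
                              llvm::CGSCCUpdateResult &UR) {
    // Nothing was changed, so every cached analysis result remains valid.
    return llvm::PreservedAnalyses::all();
  }
};
} // end anonymous namespace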
130
131
bool CGSCCAnalysisManagerModuleProxy::Result::invalidate(
132
    Module &M, const PreservedAnalyses &PA,
133
376
    ModuleAnalysisManager::Invalidator &Inv) {
134
376
  // If literally everything is preserved, we're done.
135
376
  if (PA.areAllPreserved())
136
0
    return false; // This is still a valid proxy.
137
376
138
376
  // If this proxy or the call graph is going to be invalidated, we also need
139
376
  // to clear all the keys coming from that analysis.
140
376
  //
141
376
  // We also directly invalidate the FAM's module proxy if necessary, and if
142
376
  // that proxy isn't preserved we can't preserve this proxy either. We rely on
143
376
  // it to handle module -> function analysis invalidation in the face of
144
376
  // structural changes and so if it's unavailable we conservatively clear the
145
376
  // entire SCC layer as well rather than trying to do invalidation ourselves.
146
376
  auto PAC = PA.getChecker<CGSCCAnalysisManagerModuleProxy>();
147
376
  if (!(PAC.preserved() || PAC.preservedSet<AllAnalysesOn<Module>>()) ||    [region count: 107]
148
376
      Inv.invalidate<LazyCallGraphAnalysis>(M, PA) ||    [region count: 269]
149
376
      Inv.invalidate<FunctionAnalysisManagerModuleProxy>(M, PA)) {    [region count: 269]
150
107
    InnerAM->clear();
151
107
152
107
    // And the proxy itself should be marked as invalid so that we can observe
153
107
    // the new call graph. This isn't strictly necessary because we cheat
154
107
    // above, but is still useful.
155
107
    return true;
156
107
  }
157
269
158
269
  // Directly check if the relevant set is preserved so we can short circuit
159
269
  // invalidating SCCs below.
160
269
  bool AreSCCAnalysesPreserved =
161
269
      PA.allAnalysesInSetPreserved<AllAnalysesOn<LazyCallGraph::SCC>>();
162
269
163
269
  // Ok, we have a graph, so we can propagate the invalidation down into it.
164
269
  G->buildRefSCCs();
165
269
  for (auto &RC : G->postorder_ref_sccs())
166
1.72k
    for (auto &C : RC) {    [region count: 1.69k]
167
1.72k
      Optional<PreservedAnalyses> InnerPA;
168
1.72k
169
1.72k
      // Check to see whether the preserved set needs to be adjusted based on
170
1.72k
      // module-level analysis invalidation triggering deferred invalidation
171
1.72k
      // for this SCC.
172
1.72k
      if (auto *OuterProxy =
173
1.72k
              InnerAM->getCachedResult<ModuleAnalysisManagerCGSCCProxy>(C))
174
1.72k
        for (const auto &OuterInvalidationPair :
175
1.72k
             OuterProxy->getOuterInvalidations()) {
176
14
          AnalysisKey *OuterAnalysisID = OuterInvalidationPair.first;
177
14
          const auto &InnerAnalysisIDs = OuterInvalidationPair.second;
178
14
          if (Inv.invalidate(OuterAnalysisID, M, PA)) {
179
14
            if (!InnerPA)
180
14
              InnerPA = PA;
181
14
            for (AnalysisKey *InnerAnalysisID : InnerAnalysisIDs)
182
14
              InnerPA->abandon(InnerAnalysisID);
183
14
          }
184
14
        }
185
1.72k
186
1.72k
      // Check if we needed a custom PA set. If so we'll need to run the inner
187
1.72k
      // invalidation.
188
1.72k
      if (InnerPA) {
189
14
        InnerAM->invalidate(C, *InnerPA);
190
14
        continue;
191
14
      }
192
1.70k
193
1.70k
      // Otherwise we only need to do invalidation if the original PA set didn't
194
1.70k
      // preserve all SCC analyses.
195
1.70k
      if (!AreSCCAnalysesPreserved)
196
18
        InnerAM->invalidate(C, PA);
197
1.70k
    }
198
269
199
269
  // Return false to indicate that this result is still a valid proxy.
200
269
  return false;
201
269
}
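As the invalidate() logic above shows, a module pass avoids clearing the whole SCC layer by declaring what it preserves. A minimal sketch of the PreservedAnalyses such a pass might return, assuming it genuinely kept the call graph and all SCC-level results intact:

// Inside some module pass's run(Module &, ModuleAnalysisManager &) -- a sketch.
llvm::PreservedAnalyses PA;
// Keep this proxy and the underlying call graph result alive...
PA.preserve<llvm::CGSCCAnalysisManagerModuleProxy>();
PA.preserve<llvm::LazyCallGraphAnalysis>();
// ...and state that every SCC-level analysis result is still valid.
PA.preserveSet<llvm::AllAnalysesOn<llvm::LazyCallGraph::SCC>>();
return PA;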
202
203
template <>
204
CGSCCAnalysisManagerModuleProxy::Result
205
423
CGSCCAnalysisManagerModuleProxy::run(Module &M, ModuleAnalysisManager &AM) {
206
423
  // Force the Function analysis manager to also be available so that it can
207
423
  // be accessed in an SCC analysis and proxied onward to function passes.
208
423
  // FIXME: It is pretty awkward to just drop the result here and assert that
209
423
  // we can find it again later.
210
423
  (void)AM.getResult<FunctionAnalysisManagerModuleProxy>(M);
211
423
212
423
  return Result(*InnerAM, AM.getResult<LazyCallGraphAnalysis>(M));
213
423
}
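For reference, a module pass typically reaches the CGSCC analysis manager that this proxy owns as in the following sketch, where M is the Module and MAM is the ModuleAnalysisManager handed to that pass:

// Sketch only; assumes the proxy was registered with MAM.
llvm::CGSCCAnalysisManager &CGAM =
    MAM.getResult<llvm::CGSCCAnalysisManagerModuleProxy>(M).getManager();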
214
215
AnalysisKey FunctionAnalysisManagerCGSCCProxy::Key;
216
217
FunctionAnalysisManagerCGSCCProxy::Result
218
FunctionAnalysisManagerCGSCCProxy::run(LazyCallGraph::SCC &C,
219
                                       CGSCCAnalysisManager &AM,
220
3.53k
                                       LazyCallGraph &CG) {
221
3.53k
  // Collect the FunctionAnalysisManager from the Module layer and use that to
222
3.53k
  // build the proxy result.
223
3.53k
  //
224
3.53k
  // This allows us to rely on the FunctionAnalysisManagerModuleProxy to
225
3.53k
  // invalidate the function analyses.
226
3.53k
  auto &MAM = AM.getResult<ModuleAnalysisManagerCGSCCProxy>(C, CG).getManager();
227
3.53k
  Module &M = *C.begin()->getFunction().getParent();
228
3.53k
  auto *FAMProxy = MAM.getCachedResult<FunctionAnalysisManagerModuleProxy>(M);
229
3.53k
  assert(FAMProxy && "The CGSCC pass manager requires that the FAM module "
230
3.53k
                     "proxy is run on the module prior to entering the CGSCC "
231
3.53k
                     "walk.");
232
3.53k
233
3.53k
  // Note that we special-case invalidation handling of this proxy in the CGSCC
234
3.53k
  // analysis manager's Module proxy. This avoids the need to do anything
235
3.53k
  // special here to recompute all of this if ever the FAM's module proxy goes
236
3.53k
  // away.
237
3.53k
  return Result(FAMProxy->getManager());
238
3.53k
}
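Correspondingly, a CGSCC pass does not build its own FunctionAnalysisManager; it goes through this proxy. A sketch using the C, AM, and CG parameters such a pass receives:

// Sketch: C, AM and CG are the parameters a CGSCC pass's run() receives.
llvm::FunctionAnalysisManager &FAM =
    AM.getResult<llvm::FunctionAnalysisManagerCGSCCProxy>(C, CG).getManager();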
239
240
bool FunctionAnalysisManagerCGSCCProxy::Result::invalidate(
241
    LazyCallGraph::SCC &C, const PreservedAnalyses &PA,
242
3.84k
    CGSCCAnalysisManager::Invalidator &Inv) {
243
3.84k
  // If literally everything is preserved, we're done.
244
3.84k
  if (PA.areAllPreserved())
245
0
    return false; // This is still a valid proxy.
246
3.84k
247
3.84k
  // If this proxy isn't marked as preserved, then even if the result remains
248
3.84k
  // valid, the key itself may no longer be valid, so we clear everything.
249
3.84k
  //
250
3.84k
  // Note that in order to preserve this proxy, a module pass must ensure that
251
3.84k
  // the FAM has been completely updated to handle the deletion of functions.
252
3.84k
  // Specifically, any FAM-cached results for those functions need to have been
253
3.84k
  // forcibly cleared. When preserved, this proxy will only invalidate results
254
3.84k
  // cached on functions *still in the module* at the end of the module pass.
255
3.84k
  auto PAC = PA.getChecker<FunctionAnalysisManagerCGSCCProxy>();
256
3.84k
  if (!PAC.preserved() && !PAC.preservedSet<AllAnalysesOn<LazyCallGraph::SCC>>()) {    [region count: 1.50k]
257
1.50k
    for (LazyCallGraph::Node &N : C)
258
1.60k
      FAM->clear(N.getFunction(), N.getFunction().getName());
259
1.50k
260
1.50k
    return true;
261
1.50k
  }
262
2.33k
263
2.33k
  // Directly check if the relevant set is preserved.
264
2.33k
  bool AreFunctionAnalysesPreserved =
265
2.33k
      PA.allAnalysesInSetPreserved<AllAnalysesOn<Function>>();
266
2.33k
267
2.33k
  // Now walk all the functions to see if any inner analysis invalidation is
268
2.33k
  // necessary.
269
2.57k
  for (LazyCallGraph::Node &N : C) {
270
2.57k
    Function &F = N.getFunction();
271
2.57k
    Optional<PreservedAnalyses> FunctionPA;
272
2.57k
273
2.57k
    // Check to see whether the preserved set needs to be pruned based on
274
2.57k
    // SCC-level analysis invalidation that triggers deferred invalidation
275
2.57k
    // registered with the outer analysis manager proxy for this function.
276
2.57k
    if (auto *OuterProxy =
277
42
            FAM->getCachedResult<CGSCCAnalysisManagerFunctionProxy>(F))
278
42
      for (const auto &OuterInvalidationPair :
279
42
           OuterProxy->getOuterInvalidations()) {
280
15
        AnalysisKey *OuterAnalysisID = OuterInvalidationPair.first;
281
15
        const auto &InnerAnalysisIDs = OuterInvalidationPair.second;
282
15
        if (Inv.invalidate(OuterAnalysisID, C, PA)) {
283
14
          if (!FunctionPA)
284
14
            FunctionPA = PA;
285
14
          for (AnalysisKey *InnerAnalysisID : InnerAnalysisIDs)
286
14
            FunctionPA->abandon(InnerAnalysisID);
287
14
        }
288
15
      }
289
2.57k
290
2.57k
    // Check if we needed a custom PA set, and if so we'll need to run the
291
2.57k
    // inner invalidation.
292
2.57k
    if (FunctionPA) {
293
14
      FAM->invalidate(F, *FunctionPA);
294
14
      continue;
295
14
    }
296
2.55k
297
2.55k
    // Otherwise we only need to do invalidation if the original PA set didn't
298
2.55k
    // preserve all function analyses.
299
2.55k
    if (!AreFunctionAnalysesPreserved)
300
1.53k
      FAM->invalidate(F, PA);
301
2.55k
  }
302
2.33k
303
2.33k
  // Return false to indicate that this result is still a valid proxy.
304
2.33k
  return false;
305
2.33k
}
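The deferred-invalidation walk above only has work to do when a function analysis has registered a dependency on an SCC-level result via the outer proxy. A sketch of how that registration might look inside a hypothetical function analysis (MySCCAnalysis and MyFunctionAnalysis are placeholder names, not analyses defined in this file):

// Sketch, inside MyFunctionAnalysis::run(Function &F, FunctionAnalysisManager &FAM):
auto &OuterProxy = FAM.getResult<llvm::CGSCCAnalysisManagerFunctionProxy>(F);
// If MySCCAnalysis is invalidated on the enclosing SCC, abandon this
// function-level result too (that is what the loop above processes).
OuterProxy.registerOuterAnalysisInvalidation<MySCCAnalysis, MyFunctionAnalysis>();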
306
307
} // end namespace llvm
308
309
/// When a new SCC is created for the graph and there might be function
310
/// analysis results cached for the functions now in that SCC two forms of
311
/// updates are required.
312
///
313
/// First, a proxy from the SCC to the FunctionAnalysisManager needs to be
314
/// created so that any subsequent invalidation events to the SCC are
315
/// propagated to the function analysis results cached for functions within it.
316
///
317
/// Second, if any of the functions within the SCC have analysis results with
318
/// outer analysis dependencies, then those dependencies would point to the
319
/// *wrong* SCC's analysis result. We forcibly invalidate the necessary
320
/// function analyses so that they don't retain stale handles.
321
static void updateNewSCCFunctionAnalyses(LazyCallGraph::SCC &C,
322
                                         LazyCallGraph &G,
323
47
                                         CGSCCAnalysisManager &AM) {
324
47
  // Get the relevant function analysis manager.
325
47
  auto &FAM =
326
47
      AM.getResult<FunctionAnalysisManagerCGSCCProxy>(C, G).getManager();
327
47
328
47
  // Now walk the functions in this SCC and invalidate any function analysis
329
47
  // results that might have outer dependencies on an SCC analysis.
330
49
  for (LazyCallGraph::Node &N : C) {
331
49
    Function &F = N.getFunction();
332
49
333
49
    auto *OuterProxy =
334
49
        FAM.getCachedResult<CGSCCAnalysisManagerFunctionProxy>(F);
335
49
    if (!OuterProxy)
336
47
      // No outer analyses were queried, nothing to do.
337
47
      continue;
338
2
339
2
    // Forcibly abandon all the inner analyses with dependencies, but
340
2
    // invalidate nothing else.
341
2
    auto PA = PreservedAnalyses::all();
342
2
    for (const auto &OuterInvalidationPair :
343
2
         OuterProxy->getOuterInvalidations()) {
344
2
      const auto &InnerAnalysisIDs = OuterInvalidationPair.second;
345
2
      for (AnalysisKey *InnerAnalysisID : InnerAnalysisIDs)
346
2
        PA.abandon(InnerAnalysisID);
347
2
    }
348
2
349
2
    // Now invalidate anything we found.
350
2
    FAM.invalidate(F, PA);
351
2
  }
352
47
}
353
354
/// Helper function to update both the \c CGSCCAnalysisManager \p AM and the \c
355
/// CGSCCPassManager's \c CGSCCUpdateResult \p UR based on a range of newly
356
/// added SCCs.
357
///
358
/// The range of new SCCs must be in postorder already. The SCC they were split
359
/// out of must be provided as \p C. The current node being mutated and
360
/// triggering updates must be passed as \p N.
361
///
362
/// This function returns the SCC containing \p N. This will be either \p C if
363
/// no new SCCs have been split out, or it will be the new SCC containing \p N.
364
template <typename SCCRangeT>
365
static LazyCallGraph::SCC *
366
incorporateNewSCCRange(const SCCRangeT &NewSCCRange, LazyCallGraph &G,
367
                       LazyCallGraph::Node &N, LazyCallGraph::SCC *C,
368
45
                       CGSCCAnalysisManager &AM, CGSCCUpdateResult &UR) {
369
45
  using SCC = LazyCallGraph::SCC;
370
45
371
45
  if (NewSCCRange.begin() == NewSCCRange.end())
372
12
    return C;
373
33
374
33
  // Add the current SCC to the worklist as its shape has changed.
375
33
  UR.CWorklist.insert(C);
376
33
  LLVM_DEBUG(dbgs() << "Enqueuing the existing SCC in the worklist:" << *C
377
33
                    << "\n");
378
33
379
33
  SCC *OldC = C;
380
33
381
33
  // Update the current SCC. Note that if we have new SCCs, this must actually
382
33
  // change the SCC.
383
33
  assert(C != &*NewSCCRange.begin() &&
384
33
         "Cannot insert new SCCs without changing current SCC!");
385
33
  C = &*NewSCCRange.begin();
386
33
  assert(G.lookupSCC(N) == C && "Failed to update current SCC!");
387
33
388
33
  // If we had a cached FAM proxy originally, we will want to create more of
389
33
  // them for each SCC that was split off.
390
33
  bool NeedFAMProxy =
391
33
      AM.getCachedResult<FunctionAnalysisManagerCGSCCProxy>(*OldC) != nullptr;
392
33
393
33
  // We need to propagate an invalidation call to all but the newly current SCC
394
33
  // because the outer pass manager won't do that for us after splitting them.
395
33
  // FIXME: We should accept a PreservedAnalysis from the CG updater so that if
396
33
  // there are preserved analyses we can avoid invalidating them here for
397
33
  // split-off SCCs.
398
33
  // We know however that this will preserve any FAM proxy so go ahead and mark
399
33
  // that.
400
33
  PreservedAnalyses PA;
401
33
  PA.preserve<FunctionAnalysisManagerCGSCCProxy>();
402
33
  AM.invalidate(*OldC, PA);
403
33
404
33
  // Ensure the now-current SCC's function analyses are updated.
405
33
  if (NeedFAMProxy)
406
33
    updateNewSCCFunctionAnalyses(*C, G, AM);
407
33
408
33
  for (SCC &NewC : llvm::reverse(make_range(std::next(NewSCCRange.begin()),
409
33
                                            NewSCCRange.end()))) {
410
14
    assert(C != &NewC && "No need to re-visit the current SCC!");
411
14
    assert(OldC != &NewC && "Already handled the original SCC!");
412
14
    UR.CWorklist.insert(&NewC);
413
14
    LLVM_DEBUG(dbgs() << "Enqueuing a newly formed SCC:" << NewC << "\n");
414
14
415
14
    // Ensure new SCCs' function analyses are updated.
416
14
    if (NeedFAMProxy)
417
14
      updateNewSCCFunctionAnalyses(NewC, G, AM);
418
14
419
14
    // Also propagate a normal invalidation to the new SCC as only the current
420
14
    // will get one from the pass manager infrastructure.
421
14
    AM.invalidate(NewC, PA);
422
14
  }
423
33
  return C;
424
33
}
425
426
LazyCallGraph::SCC &llvm::updateCGAndAnalysisManagerForFunctionPass(
427
    LazyCallGraph &G, LazyCallGraph::SCC &InitialC, LazyCallGraph::Node &N,
428
1.45k
    CGSCCAnalysisManager &AM, CGSCCUpdateResult &UR) {
429
1.45k
  using Node = LazyCallGraph::Node;
430
1.45k
  using Edge = LazyCallGraph::Edge;
431
1.45k
  using SCC = LazyCallGraph::SCC;
432
1.45k
  using RefSCC = LazyCallGraph::RefSCC;
433
1.45k
434
1.45k
  RefSCC &InitialRC = InitialC.getOuterRefSCC();
435
1.45k
  SCC *C = &InitialC;
436
1.45k
  RefSCC *RC = &InitialRC;
437
1.45k
  Function &F = N.getFunction();
438
1.45k
439
1.45k
  // Walk the function body and build up the set of retained, promoted, and
440
1.45k
  // demoted edges.
441
1.45k
  SmallVector<Constant *, 16> Worklist;
442
1.45k
  SmallPtrSet<Constant *, 16> Visited;
443
1.45k
  SmallPtrSet<Node *, 16> RetainedEdges;
444
1.45k
  SmallSetVector<Node *, 4> PromotedRefTargets;
445
1.45k
  SmallSetVector<Node *, 4> DemotedCallTargets;
446
1.45k
447
1.45k
  // First walk the function and handle all called functions. We do this first
448
1.45k
  // because if there is a single call edge, whether there are ref edges is
449
1.45k
  // irrelevant.
450
1.45k
  for (Instruction &I : instructions(F))
451
8.64k
    if (auto CS = CallSite(&I))
452
3.28k
      if (Function *Callee = CS.getCalledFunction())
453
3.22k
        if (Visited.insert(Callee).second && !Callee->isDeclaration()) {    [region count: 1.80k]
454
169
          Node &CalleeN = *G.lookup(*Callee);
455
169
          Edge *E = N->lookup(CalleeN);
456
169
          // FIXME: We should really handle adding new calls. While it will
457
169
          // make downstream usage more complex, there is no fundamental
458
169
          // limitation and it will allow passes within the CGSCC to be a bit
459
169
          // more flexible in what transforms they can do. Until then, we
460
169
          // verify that new calls haven't been introduced.
461
169
          assert(E && "No function transformations should introduce *new* "
462
169
                      "call edges! Any new calls should be modeled as "
463
169
                      "promoted existing ref edges!");
464
169
          bool Inserted = RetainedEdges.insert(&CalleeN).second;
465
169
          (void)Inserted;
466
169
          assert(Inserted && "We should never visit a function twice.");
467
169
          if (!E->isCall())
468
51
            PromotedRefTargets.insert(&CalleeN);
469
169
        }
470
1.45k
471
1.45k
  // Now walk all references.
472
1.45k
  for (Instruction &I : instructions(F))
473
8.64k
    for (Value *Op : I.operand_values())
474
14.6k
      if (auto *C = dyn_cast<Constant>(Op))
475
6.70k
        if (Visited.insert(C).second)
476
2.11k
          Worklist.push_back(C);
477
1.45k
478
1.45k
  auto VisitRef = [&](Function &Referee) {
479
161
    Node &RefereeN = *G.lookup(Referee);
480
161
    Edge *E = N->lookup(RefereeN);
481
161
    // FIXME: Similarly to new calls, we also currently preclude
482
161
    // introducing new references. See above for details.
483
161
    assert(E && "No function transformations should introduce *new* ref "
484
161
                "edges! Any new ref edges would require IPO which "
485
161
                "function passes aren't allowed to do!");
486
161
    bool Inserted = RetainedEdges.insert(&RefereeN).second;
487
161
    (void)Inserted;
488
161
    assert(Inserted && "We should never visit a function twice.");
489
161
    if (E->isCall())
490
24
      DemotedCallTargets.insert(&RefereeN);
491
161
  };
492
1.45k
  LazyCallGraph::visitReferences(Worklist, Visited, VisitRef);
493
1.45k
494
1.45k
  // Include synthetic reference edges to known, defined lib functions.
495
1.45k
  for (auto *F : G.getLibFunctions())
496
21
    // While the list of lib functions doesn't have repeats, don't re-visit
497
21
    // anything handled above.
498
21
    if (!Visited.count(F))
499
19
      VisitRef(*F);
500
1.45k
501
1.45k
  // First remove all of the edges that are no longer present in this function.
502
1.45k
  // The first step makes these edges uniformly ref edges and accumulates them
503
1.45k
  // into a separate data structure so removal doesn't invalidate anything.
504
1.45k
  SmallVector<Node *, 4> DeadTargets;
505
1.45k
  for (Edge &E : *N) {
506
1.03k
    if (RetainedEdges.count(&E.getNode()))
507
330
      continue;
508
707
509
707
    SCC &TargetC = *G.lookupSCC(E.getNode());
510
707
    RefSCC &TargetRC = TargetC.getOuterRefSCC();
511
707
    if (&TargetRC == RC && E.isCall()) {    [region count: 51]
512
37
      if (C != &TargetC) {
513
2
        // For separate SCCs this is trivial.
514
2
        RC->switchTrivialInternalEdgeToRef(N, E.getNode());
515
35
      } else {
516
35
        // Now update the call graph.
517
35
        C = incorporateNewSCCRange(RC->switchInternalEdgeToRef(N, E.getNode()),
518
35
                                   G, N, C, AM, UR);
519
35
      }
520
37
    }
521
707
522
707
    // Now that this is ready for actual removal, put it into our list.
523
707
    DeadTargets.push_back(&E.getNode());
524
707
  }
525
1.45k
  // Remove the easy cases quickly and actually pull them out of our list.
526
1.45k
  DeadTargets.erase(
527
1.45k
      llvm::remove_if(DeadTargets,
528
1.45k
                      [&](Node *TargetN) {
529
707
                        SCC &TargetC = *G.lookupSCC(*TargetN);
530
707
                        RefSCC &TargetRC = TargetC.getOuterRefSCC();
531
707
532
707
                        // We can't trivially remove internal targets, so skip
533
707
                        // those.
534
707
                        if (&TargetRC == RC)
535
51
                          return false;
536
656
537
656
                        RC->removeOutgoingEdge(N, *TargetN);
538
656
                        LLVM_DEBUG(dbgs() << "Deleting outgoing edge from '"
539
656
                                          << N << "' to '" << TargetN << "'\n");
540
656
                        return true;
541
656
                      }),
542
1.45k
      DeadTargets.end());
543
1.45k
544
1.45k
  // Now do a batch removal of the internal ref edges left.
545
1.45k
  auto NewRefSCCs = RC->removeInternalRefEdge(N, DeadTargets);
546
1.45k
  if (!NewRefSCCs.empty()) {
547
21
    // The old RefSCC is dead, mark it as such.
548
21
    UR.InvalidatedRefSCCs.insert(RC);
549
21
550
21
    // Note that we don't bother to invalidate analyses as ref-edge
551
21
    // connectivity is not really observable in any way and is intended
552
21
    // exclusively to be used for ordering of transforms rather than for
553
21
    // analysis conclusions.
554
21
555
21
    // Update RC to the "bottom".
556
21
    assert(G.lookupSCC(N) == C && "Changed the SCC when splitting RefSCCs!");
557
21
    RC = &C->getOuterRefSCC();
558
21
    assert(G.lookupRefSCC(N) == RC && "Failed to update current RefSCC!");
559
21
560
21
    // The RC worklist is in reverse postorder, so we enqueue the new ones in
561
21
    // RPO except for the one which contains the source node as that is the
562
21
    // "bottom" we will continue processing in the bottom-up walk.
563
21
    assert(NewRefSCCs.front() == RC &&
564
21
           "New current RefSCC not first in the returned list!");
565
21
    for (RefSCC *NewRC : llvm::reverse(make_range(std::next(NewRefSCCs.begin()),
566
31
                                                  NewRefSCCs.end()))) {
567
31
      assert(NewRC != RC && "Should not encounter the current RefSCC further "
568
31
                            "in the postorder list of new RefSCCs.");
569
31
      UR.RCWorklist.insert(NewRC);
570
31
      LLVM_DEBUG(dbgs() << "Enqueuing a new RefSCC in the update worklist: "
571
31
                        << *NewRC << "\n");
572
31
    }
573
21
  }
574
1.45k
575
1.45k
  // Next demote all the call edges that are now ref edges. This helps make
576
1.45k
  // the SCCs small which should minimize the work below as we don't want to
577
1.45k
  // form cycles that this would break.
578
1.45k
  for (Node *RefTarget : DemotedCallTargets) {
579
24
    SCC &TargetC = *G.lookupSCC(*RefTarget);
580
24
    RefSCC &TargetRC = TargetC.getOuterRefSCC();
581
24
582
24
    // The easy case is when the target RefSCC is not this RefSCC. This is
583
24
    // only supported when the target RefSCC is a child of this RefSCC.
584
24
    if (&TargetRC != RC) {
585
14
      assert(RC->isAncestorOf(TargetRC) &&
586
14
             "Cannot potentially form RefSCC cycles here!");
587
14
      RC->switchOutgoingEdgeToRef(N, *RefTarget);
588
14
      LLVM_DEBUG(dbgs() << "Switch outgoing call edge to a ref edge from '" << N
589
14
                        << "' to '" << *RefTarget << "'\n");
590
14
      continue;
591
14
    }
592
10
593
10
    // We are switching an internal call edge to a ref edge. This may split up
594
10
    // some SCCs.
595
10
    if (C != &TargetC) {
596
0
      // For separate SCCs this is trivial.
597
0
      RC->switchTrivialInternalEdgeToRef(N, *RefTarget);
598
0
      continue;
599
0
    }
600
10
601
10
    // Now update the call graph.
602
10
    C = incorporateNewSCCRange(RC->switchInternalEdgeToRef(N, *RefTarget), G, N,
603
10
                               C, AM, UR);
604
10
  }
605
1.45k
606
1.45k
  // Now promote ref edges into call edges.
607
1.45k
  for (Node *CallTarget : PromotedRefTargets) {
608
51
    SCC &TargetC = *G.lookupSCC(*CallTarget);
609
51
    RefSCC &TargetRC = TargetC.getOuterRefSCC();
610
51
611
51
    // The easy case is when the target RefSCC is not this RefSCC. This is
612
51
    // only supported when the target RefSCC is a child of this RefSCC.
613
51
    if (&TargetRC != RC) {
614
5
      assert(RC->isAncestorOf(TargetRC) &&
615
5
             "Cannot potentially form RefSCC cycles here!");
616
5
      RC->switchOutgoingEdgeToCall(N, *CallTarget);
617
5
      LLVM_DEBUG(dbgs() << "Switch outgoing ref edge to a call edge from '" << N
618
5
                        << "' to '" << *CallTarget << "'\n");
619
5
      continue;
620
5
    }
621
46
    LLVM_DEBUG(dbgs() << "Switch an internal ref edge to a call edge from '"
622
46
                      << N << "' to '" << *CallTarget << "'\n");
623
46
624
46
    // Otherwise we are switching an internal ref edge to a call edge. This
625
46
    // may merge away some SCCs, and we add those to the UpdateResult. We also
626
46
    // need to make sure to update the worklist in the event SCCs have moved
627
46
    // before the current one in the post-order sequence
628
46
    bool HasFunctionAnalysisProxy = false;
629
46
    auto InitialSCCIndex = RC->find(*C) - RC->begin();
630
46
    bool FormedCycle = RC->switchInternalEdgeToCall(
631
46
        N, *CallTarget, [&](ArrayRef<SCC *> MergedSCCs) {
632
22
          for (SCC *MergedC : MergedSCCs) {
633
22
            assert(MergedC != &TargetC && "Cannot merge away the target SCC!");
634
22
635
22
            HasFunctionAnalysisProxy |=
636
22
                AM.getCachedResult<FunctionAnalysisManagerCGSCCProxy>(
637
22
                    *MergedC) != nullptr;
638
22
639
22
            // Mark that this SCC will no longer be valid.
640
22
            UR.InvalidatedSCCs.insert(MergedC);
641
22
642
22
            // FIXME: We should really do a 'clear' here to forcibly release
643
22
            // memory, but we don't have a good way of doing that and
644
22
            // preserving the function analyses.
645
22
            auto PA = PreservedAnalyses::allInSet<AllAnalysesOn<Function>>();
646
22
            PA.preserve<FunctionAnalysisManagerCGSCCProxy>();
647
22
            AM.invalidate(*MergedC, PA);
648
22
          }
649
22
        });
650
46
651
46
    // If we formed a cycle by creating this call, we need to update more data
652
46
    // structures.
653
46
    if (FormedCycle) {
654
20
      C = &TargetC;
655
20
      assert(G.lookupSCC(N) == C && "Failed to update current SCC!");
656
20
657
20
      // If one of the invalidated SCCs had a cached proxy to a function
658
20
      // analysis manager, we need to create a proxy in the new current SCC as
659
20
      // the invalidated SCCs had their functions moved.
660
20
      if (HasFunctionAnalysisProxy)
661
20
        AM.getResult<FunctionAnalysisManagerCGSCCProxy>(*C, G);
662
20
663
20
      // Any analyses cached for this SCC are no longer precise as the shape
664
20
      // has changed by introducing this cycle. However, we have taken care to
665
20
      // update the proxies so it remains valid.
666
20
      auto PA = PreservedAnalyses::allInSet<AllAnalysesOn<Function>>();
667
20
      PA.preserve<FunctionAnalysisManagerCGSCCProxy>();
668
20
      AM.invalidate(*C, PA);
669
20
    }
670
46
    auto NewSCCIndex = RC->find(*C) - RC->begin();
671
46
    // If we have actually moved an SCC to be topologically "below" the current
672
46
    // one due to merging, we will need to revisit the current SCC after
673
46
    // visiting those moved SCCs.
674
46
    //
675
46
    // It is critical that we *do not* revisit the current SCC unless we
676
46
    // actually move SCCs in the process of merging because otherwise we may
677
46
    // form a cycle where an SCC is split apart, merged, split, merged and so
678
46
    // on infinitely.
679
46
    if (InitialSCCIndex < NewSCCIndex) {
680
2
      // Put our current SCC back onto the worklist as we'll visit other SCCs
681
2
      // that are now definitively ordered prior to the current one in the
682
2
      // post-order sequence, and may end up observing more precise context to
683
2
      // optimize the current SCC.
684
2
      UR.CWorklist.insert(C);
685
2
      LLVM_DEBUG(dbgs() << "Enqueuing the existing SCC in the worklist: " << *C
686
2
                        << "\n");
687
2
      // Enqueue in reverse order as we pop off the back of the worklist.
688
2
      for (SCC &MovedC : llvm::reverse(make_range(RC->begin() + InitialSCCIndex,
689
2
                                                  RC->begin() + NewSCCIndex))) {
690
2
        UR.CWorklist.insert(&MovedC);
691
2
        LLVM_DEBUG(dbgs() << "Enqueuing a newly earlier in post-order SCC: "
692
2
                          << MovedC << "\n");
693
2
      }
694
2
    }
695
46
  }
696
1.45k
697
1.45k
  assert(!UR.InvalidatedSCCs.count(C) && "Invalidated the current SCC!");
698
1.45k
  assert(!UR.InvalidatedRefSCCs.count(RC) && "Invalidated the current RefSCC!");
699
1.45k
  assert(&C->getOuterRefSCC() == RC && "Current SCC not in current RefSCC!");
700
1.45k
701
1.45k
  // Record the current RefSCC and SCC for higher layers of the CGSCC pass
702
1.45k
  // manager now that all the updates have been applied.
703
1.45k
  if (RC != &InitialRC)
704
21
    UR.UpdatedRC = RC;
705
1.45k
  if (C != &InitialC)
706
37
    UR.UpdatedC = C;
707
1.45k
708
1.45k
  return *C;
709
1.45k
}
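Taken together, the machinery in this file is normally exercised through the adaptors declared in CGSCCPassManager.h. A sketch of the usual pipeline shape, assuming the analysis managers have already been registered and cross-wired (e.g. via llvm::PassBuilder) and with MyFunctionPass as a placeholder function pass:

#include "llvm/Analysis/CGSCCPassManager.h"
#include "llvm/IR/Module.h"
#include "llvm/IR/PassManager.h"
#include <utility>

void runExamplePipeline(llvm::Module &M, llvm::ModuleAnalysisManager &MAM) {
  llvm::CGSCCPassManager CGPM;
  // Function passes nest inside the CGSCC walk; graph mutations they make are
  // reconciled by updateCGAndAnalysisManagerForFunctionPass() above.
  CGPM.addPass(llvm::createCGSCCToFunctionPassAdaptor(MyFunctionPass()));

  llvm::ModulePassManager MPM;
  MPM.addPass(llvm::createModuleToPostOrderCGSCCPassAdaptor(std::move(CGPM)));

  llvm::PreservedAnalyses PA = MPM.run(M, MAM);
  (void)PA;
}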