Coverage Report

Created: 2020-09-19 12:23

/Users/buildslave/jenkins/workspace/coverage/llvm-project/clang/lib/StaticAnalyzer/Core/ExprEngineCallAndReturn.cpp

//=-- ExprEngineCallAndReturn.cpp - Support for call/return -----*- C++ -*-===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
//  This file defines ExprEngine's support for calls and returns.
//
//===----------------------------------------------------------------------===//

#include "PrettyStackTraceLocationContext.h"
#include "clang/AST/CXXInheritance.h"
#include "clang/AST/Decl.h"
#include "clang/AST/DeclCXX.h"
#include "clang/Analysis/Analyses/LiveVariables.h"
#include "clang/Analysis/ConstructionContext.h"
#include "clang/StaticAnalyzer/Core/CheckerManager.h"
#include "clang/StaticAnalyzer/Core/PathSensitive/CallEvent.h"
#include "clang/StaticAnalyzer/Core/PathSensitive/ExprEngine.h"
#include "llvm/ADT/SmallSet.h"
#include "llvm/ADT/Statistic.h"
#include "llvm/Support/Casting.h"
#include "llvm/Support/Compiler.h"
#include "llvm/Support/SaveAndRestore.h"

using namespace clang;
using namespace ento;

#define DEBUG_TYPE "ExprEngine"

STATISTIC(NumOfDynamicDispatchPathSplits,
  "The # of times we split the path due to imprecise dynamic dispatch info");

STATISTIC(NumInlinedCalls,
  "The # of times we inlined a call");

STATISTIC(NumReachedInlineCountMax,
  "The # of times we reached inline count maximum");

void ExprEngine::processCallEnter(NodeBuilderContext& BC, CallEnter CE,
                                  ExplodedNode *Pred) {
  // Get the entry block in the CFG of the callee.
  const StackFrameContext *calleeCtx = CE.getCalleeContext();
  PrettyStackTraceLocationContext CrashInfo(calleeCtx);
  const CFGBlock *Entry = CE.getEntry();

  // Validate the CFG.
  assert(Entry->empty());
  assert(Entry->succ_size() == 1);

  // Get the solitary successor.
  const CFGBlock *Succ = *(Entry->succ_begin());

  // Construct an edge representing the starting location in the callee.
  BlockEdge Loc(Entry, Succ, calleeCtx);

  ProgramStateRef state = Pred->getState();

  // Construct a new node, notify checkers that analysis of the function has
  // begun, and add the resultant nodes to the worklist.
  bool isNew;
  ExplodedNode *Node = G.getNode(Loc, state, false, &isNew);
  Node->addPredecessor(Pred, G);
  if (isNew) {
    ExplodedNodeSet DstBegin;
    processBeginOfFunction(BC, Node, DstBegin, Loc);
    Engine.enqueue(DstBegin);
  }
}

// Find the last statement on the path to the exploded node and the
// corresponding Block.
static std::pair<const Stmt*,
                 const CFGBlock*> getLastStmt(const ExplodedNode *Node) {
  const Stmt *S = nullptr;
  const CFGBlock *Blk = nullptr;
  const StackFrameContext *SF = Node->getStackFrame();

  // Back up through the ExplodedGraph until we reach a statement node in this
  // stack frame.
  while (Node) {
    const ProgramPoint &PP = Node->getLocation();

    if (PP.getStackFrame() == SF) {
      if (Optional<StmtPoint> SP = PP.getAs<StmtPoint>()) {
        S = SP->getStmt();
        break;
      } else if (Optional<CallExitEnd> CEE = PP.getAs<CallExitEnd>()) {
        S = CEE->getCalleeContext()->getCallSite();
        if (S)
          break;

        // If there is no statement, this is an implicitly-generated call.
        // We'll walk backwards over it and then continue the loop to find
        // an actual statement.
        Optional<CallEnter> CE;
        do {
          Node = Node->getFirstPred();
          CE = Node->getLocationAs<CallEnter>();
        } while (!CE || CE->getCalleeContext() != CEE->getCalleeContext());

        // Continue searching the graph.
      } else if (Optional<BlockEdge> BE = PP.getAs<BlockEdge>()) {
        Blk = BE->getSrc();
      }
    } else if (Optional<CallEnter> CE = PP.getAs<CallEnter>()) {
      // If we reached the CallEnter for this function, it has no statements.
      if (CE->getCalleeContext() == SF)
        break;
    }

    if (Node->pred_empty())
      return std::make_pair(nullptr, nullptr);

    Node = *Node->pred_begin();
  }

  return std::make_pair(S, Blk);
}
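
// A concrete instance of the "no call site" case above (illustrative): an
// implicitly-generated call such as an automatic destructor invoked at the
// end of a scope has no CallExpr in the source, so getCallSite() is null and
// the do/while loop walks back over the entire inlined call before the
// search resumes.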

/// Adjusts a return value when the called function's return type does not
/// match the caller's expression type. This can happen when a dynamic call
/// is devirtualized, and the overriding method has a covariant (more specific)
/// return type than the parent's method. For C++ objects, this means we need
/// to add base casts.
static SVal adjustReturnValue(SVal V, QualType ExpectedTy, QualType ActualTy,
                              StoreManager &StoreMgr) {
  // For now, the only adjustments we handle apply only to locations.
  if (!V.getAs<Loc>())
    return V;

  // If the types already match, don't do any unnecessary work.
  ExpectedTy = ExpectedTy.getCanonicalType();
  ActualTy = ActualTy.getCanonicalType();
  if (ExpectedTy == ActualTy)
    return V;

  // No adjustment is needed between Objective-C pointer types.
  if (ExpectedTy->isObjCObjectPointerType() &&
      ActualTy->isObjCObjectPointerType())
    return V;

  // C++ object pointers may need "derived-to-base" casts.
  const CXXRecordDecl *ExpectedClass = ExpectedTy->getPointeeCXXRecordDecl();
  const CXXRecordDecl *ActualClass = ActualTy->getPointeeCXXRecordDecl();
  if (ExpectedClass && ActualClass) {
    CXXBasePaths Paths(/*FindAmbiguities=*/true, /*RecordPaths=*/true,
                       /*DetectVirtual=*/false);
    if (ActualClass->isDerivedFrom(ExpectedClass, Paths) &&
        !Paths.isAmbiguous(ActualTy->getCanonicalTypeUnqualified())) {
      return StoreMgr.evalDerivedToBase(V, Paths.front());
    }
  }

  // Unfortunately, Objective-C does not enforce that overridden methods have
  // covariant return types, so we can't assert that that never happens.
  // Be safe and return UnknownVal().
  return UnknownVal();
}
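
// Sketch of the covariant-return case handled above, using hypothetical
// types (not from this file):
//
//   struct Base { virtual Base *clone(); };
//   struct Derived : Base { Derived *clone() override; };
//   Base *b = p->clone();  // devirtualized to Derived::clone()
//
// The inlined callee returns a Derived*, while the caller's expression has
// type Base*, so adjustReturnValue() applies the derived-to-base cast to the
// returned location value.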

void ExprEngine::removeDeadOnEndOfFunction(NodeBuilderContext& BC,
                                           ExplodedNode *Pred,
                                           ExplodedNodeSet &Dst) {
  // Find the last statement in the function and the corresponding basic block.
  const Stmt *LastSt = nullptr;
  const CFGBlock *Blk = nullptr;
  std::tie(LastSt, Blk) = getLastStmt(Pred);
  if (!Blk || !LastSt) {
    Dst.Add(Pred);
    return;
  }

  // Here, we destroy the current location context. We use the current
  // function's entire body as a diagnostic statement, with which the program
  // point will be associated. However, we only want to use LastStmt as a
  // reference for what to clean up if it's a ReturnStmt; otherwise, everything
  // is dead.
  SaveAndRestore<const NodeBuilderContext *> NodeContextRAII(currBldrCtx, &BC);
  const LocationContext *LCtx = Pred->getLocationContext();
  removeDead(Pred, Dst, dyn_cast<ReturnStmt>(LastSt), LCtx,
             LCtx->getAnalysisDeclContext()->getBody(),
             ProgramPoint::PostStmtPurgeDeadSymbolsKind);
}

static bool wasDifferentDeclUsedForInlining(CallEventRef<> Call,
    const StackFrameContext *calleeCtx) {
  const Decl *RuntimeCallee = calleeCtx->getDecl();
  const Decl *StaticDecl = Call->getDecl();
  assert(RuntimeCallee);
  if (!StaticDecl)
    return true;
  return RuntimeCallee->getCanonicalDecl() != StaticDecl->getCanonicalDecl();
}

/// The call exit is simulated with a sequence of nodes, which occur between
/// CallExitBegin and CallExitEnd. The following operations occur between the
/// two program points:
/// 1. CallExitBegin (triggers the start of call exit sequence)
/// 2. Bind the return value
/// 3. Run remove dead bindings to clean up the dead symbols from the callee.
/// 4. CallExitEnd (switch to the caller context)
/// 5. PostStmt<CallExpr>
void ExprEngine::processCallExit(ExplodedNode *CEBNode) {
  // Step 1 CEBNode was generated before the call.
  PrettyStackTraceLocationContext CrashInfo(CEBNode->getLocationContext());
  const StackFrameContext *calleeCtx = CEBNode->getStackFrame();

  // The parent context might not be a stack frame, so make sure we
  // look up the first enclosing stack frame.
  const StackFrameContext *callerCtx =
    calleeCtx->getParent()->getStackFrame();

  const Stmt *CE = calleeCtx->getCallSite();
  ProgramStateRef state = CEBNode->getState();
  // Find the last statement in the function and the corresponding basic block.
  const Stmt *LastSt = nullptr;
  const CFGBlock *Blk = nullptr;
  std::tie(LastSt, Blk) = getLastStmt(CEBNode);

  // Generate a CallEvent /before/ cleaning the state, so that we can get the
  // correct value for 'this' (if necessary).
  CallEventManager &CEMgr = getStateManager().getCallEventManager();
  CallEventRef<> Call = CEMgr.getCaller(calleeCtx, state);

  // Step 2: generate node with bound return value: CEBNode -> BindedRetNode.

  // If the callee returns an expression, bind its value to CallExpr.
  if (CE) {
    if (const ReturnStmt *RS = dyn_cast_or_null<ReturnStmt>(LastSt)) {
      const LocationContext *LCtx = CEBNode->getLocationContext();
      SVal V = state->getSVal(RS, LCtx);

      // Ensure that the return type matches the type of the returned Expr.
      if (wasDifferentDeclUsedForInlining(Call, calleeCtx)) {
        QualType ReturnedTy =
          CallEvent::getDeclaredResultType(calleeCtx->getDecl());
        if (!ReturnedTy.isNull()) {
          if (const Expr *Ex = dyn_cast<Expr>(CE)) {
            V = adjustReturnValue(V, Ex->getType(), ReturnedTy,
                                  getStoreManager());
          }
        }
      }

      state = state->BindExpr(CE, callerCtx, V);
    }

    // Bind the constructed object value to CXXConstructExpr.
    if (const CXXConstructExpr *CCE = dyn_cast<CXXConstructExpr>(CE)) {
      loc::MemRegionVal This =
        svalBuilder.getCXXThis(CCE->getConstructor()->getParent(), calleeCtx);
      SVal ThisV = state->getSVal(This);
      ThisV = state->getSVal(ThisV.castAs<Loc>());
      state = state->BindExpr(CCE, callerCtx, ThisV);
    }

    if (const auto *CNE = dyn_cast<CXXNewExpr>(CE)) {
      // We are currently evaluating a CXXNewAllocator CFGElement. It takes a
      // while to reach the actual CXXNewExpr element from here, so keep the
      // region for later use.
      // Additionally cast the return value of the inlined operator new
      // (which is of type 'void *') to the correct object type.
      SVal AllocV = state->getSVal(CNE, callerCtx);
      AllocV = svalBuilder.evalCast(
          AllocV, CNE->getType(),
          getContext().getPointerType(getContext().VoidTy));

      state = addObjectUnderConstruction(state, CNE, calleeCtx->getParent(),
                                         AllocV);
    }
  }

  // Step 3: BindedRetNode -> CleanedNodes
  // If we can find a statement and a block in the inlined function, run remove
  // dead bindings before returning from the call. This is important to ensure
  // that we report issues such as leaks in the stack contexts in which
  // they occurred.
  ExplodedNodeSet CleanedNodes;
  if (LastSt && Blk && AMgr.options.AnalysisPurgeOpt != PurgeNone) {
    static SimpleProgramPointTag retValBind("ExprEngine", "Bind Return Value");
    PostStmt Loc(LastSt, calleeCtx, &retValBind);
    bool isNew;
    ExplodedNode *BindedRetNode = G.getNode(Loc, state, false, &isNew);
    BindedRetNode->addPredecessor(CEBNode, G);
    if (!isNew)
      return;

    NodeBuilderContext Ctx(getCoreEngine(), Blk, BindedRetNode);
    currBldrCtx = &Ctx;
    // Here, we call the Symbol Reaper with 0 statement and callee location
    // context, telling it to clean up everything in the callee's context
    // (and its children). We use the callee's function body as a diagnostic
    // statement, with which the program point will be associated.
    removeDead(BindedRetNode, CleanedNodes, nullptr, calleeCtx,
               calleeCtx->getAnalysisDeclContext()->getBody(),
               ProgramPoint::PostStmtPurgeDeadSymbolsKind);
    currBldrCtx = nullptr;
  } else {
    CleanedNodes.Add(CEBNode);
  }

  for (ExplodedNodeSet::iterator I = CleanedNodes.begin(),
                                 E = CleanedNodes.end(); I != E; ++I) {

    // Step 4: Generate the CallExit and leave the callee's context.
    // CleanedNodes -> CEENode
    CallExitEnd Loc(calleeCtx, callerCtx);
    bool isNew;
    ProgramStateRef CEEState = (*I == CEBNode) ? state : (*I)->getState();

    ExplodedNode *CEENode = G.getNode(Loc, CEEState, false, &isNew);
    CEENode->addPredecessor(*I, G);
    if (!isNew)
      return;

    // Step 5: Perform the post-condition check of the CallExpr and enqueue the
    // result onto the work list.
    // CEENode -> Dst -> WorkList
    NodeBuilderContext Ctx(Engine, calleeCtx->getCallSiteBlock(), CEENode);
    SaveAndRestore<const NodeBuilderContext*> NBCSave(currBldrCtx,
        &Ctx);
    SaveAndRestore<unsigned> CBISave(currStmtIdx, calleeCtx->getIndex());

    CallEventRef<> UpdatedCall = Call.cloneWithState(CEEState);

    ExplodedNodeSet DstPostCall;
    if (llvm::isa_and_nonnull<CXXNewExpr>(CE)) {
      ExplodedNodeSet DstPostPostCallCallback;
      getCheckerManager().runCheckersForPostCall(DstPostPostCallCallback,
                                                 CEENode, *UpdatedCall, *this,
                                                 /*wasInlined=*/true);
      for (ExplodedNode *I : DstPostPostCallCallback) {
        getCheckerManager().runCheckersForNewAllocator(
            cast<CXXAllocatorCall>(*UpdatedCall), DstPostCall, I, *this,
            /*wasInlined=*/true);
      }
    } else {
      getCheckerManager().runCheckersForPostCall(DstPostCall, CEENode,
                                                 *UpdatedCall, *this,
                                                 /*wasInlined=*/true);
    }
    ExplodedNodeSet Dst;
    if (const ObjCMethodCall *Msg = dyn_cast<ObjCMethodCall>(Call)) {
      getCheckerManager().runCheckersForPostObjCMessage(Dst, DstPostCall, *Msg,
                                                        *this,
                                                        /*wasInlined=*/true);
    } else if (CE &&
               !(isa<CXXNewExpr>(CE) && // Called when visiting CXXNewExpr.
                 AMgr.getAnalyzerOptions().MayInlineCXXAllocator)) {
      getCheckerManager().runCheckersForPostStmt(Dst, DstPostCall, CE,
                                                 *this, /*wasInlined=*/true);
    } else {
      Dst.insert(DstPostCall);
    }

    // Enqueue the next element in the block.
    for (ExplodedNodeSet::iterator PSI = Dst.begin(), PSE = Dst.end();
                                   PSI != PSE; ++PSI) {
      Engine.getWorkList()->enqueue(*PSI, calleeCtx->getCallSiteBlock(),
                                    calleeCtx->getIndex()+1);
    }
  }
}
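
// For a simple inlined call such as "int y = foo();", the resulting node
// chain is roughly (sketch):
//
//   CallExitBegin (CEBNode)
//     -> PostStmt, tag "Bind Return Value"   (BindedRetNode)
//     -> PostStmtPurgeDeadSymbols            (CleanedNodes)
//     -> CallExitEnd                         (CEENode, caller context again)
//     -> post-call / PostStmt<CallExpr> checker nodes -> worklist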

bool ExprEngine::isSmall(AnalysisDeclContext *ADC) const {
  // When there are no branches in the function, it means that there's no
  // exponential complexity introduced by inlining such a function.
  // Such functions also don't trigger various fundamental problems
  // with our inlining mechanism, such as the problem of
  // inlined defensive checks. Hence isLinear().
  const CFG *Cfg = ADC->getCFG();
  return Cfg->isLinear() || Cfg->size() <= AMgr.options.AlwaysInlineSize;
}

bool ExprEngine::isLarge(AnalysisDeclContext *ADC) const {
  const CFG *Cfg = ADC->getCFG();
  return Cfg->size() >= AMgr.options.MinCFGSizeTreatFunctionsAsLarge;
}

bool ExprEngine::isHuge(AnalysisDeclContext *ADC) const {
  const CFG *Cfg = ADC->getCFG();
  return Cfg->getNumBlockIDs() > AMgr.options.MaxInlinableSize;
}
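
// For instance, a branch-free helper like "int twice(int x) { return 2 * x; }"
// has a linear CFG and is therefore always "small": it is not counted toward
// the inlining stack depth in examineStackFrames() below, and may still be
// inlined once the maximum stack depth has been reached.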

void ExprEngine::examineStackFrames(const Decl *D, const LocationContext *LCtx,
                               bool &IsRecursive, unsigned &StackDepth) {
  IsRecursive = false;
  StackDepth = 0;

  while (LCtx) {
    if (const StackFrameContext *SFC = dyn_cast<StackFrameContext>(LCtx)) {
      const Decl *DI = SFC->getDecl();

      // Mark recursive (and mutually recursive) functions and always count
      // them when measuring the stack depth.
      if (DI == D) {
        IsRecursive = true;
        ++StackDepth;
        LCtx = LCtx->getParent();
        continue;
      }

      // Do not count the small functions when determining the stack depth.
      AnalysisDeclContext *CalleeADC = AMgr.getAnalysisDeclContext(DI);
      if (!isSmall(CalleeADC))
        ++StackDepth;
    }
    LCtx = LCtx->getParent();
  }
}

// The GDM component containing the dynamic dispatch bifurcation info. When
// the exact type of the receiver is not known, we want to explore both paths -
// one on which we do inline it and the other one on which we don't. This is
// done to ensure we do not drop coverage.
// This is the map from the receiver region to a value, specifying whether we
// consider this region's information precise along the given path.
namespace {
  enum DynamicDispatchMode {
    DynamicDispatchModeInlined = 1,
    DynamicDispatchModeConservative
  };
} // end anonymous namespace

REGISTER_MAP_WITH_PROGRAMSTATE(DynamicDispatchBifurcationMap,
                               const MemRegion *, unsigned)
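
// Sketch of the resulting split for an Objective-C message like
// "[receiver doWork]" whose receiver's dynamic type is only a guess:
// BifurcateCall() (below) explores one path whose state maps the receiver's
// region to DynamicDispatchModeInlined (the guessed definition is inlined)
// and one mapped to DynamicDispatchModeConservative (the call is evaluated
// conservatively); the map entry keeps later calls through the same region
// on their respective paths, so the split happens at most once per region.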

bool ExprEngine::inlineCall(const CallEvent &Call, const Decl *D,
                            NodeBuilder &Bldr, ExplodedNode *Pred,
                            ProgramStateRef State) {
  assert(D);

  const LocationContext *CurLC = Pred->getLocationContext();
  const StackFrameContext *CallerSFC = CurLC->getStackFrame();
  const LocationContext *ParentOfCallee = CallerSFC;
  if (Call.getKind() == CE_Block &&
      !cast<BlockCall>(Call).isConversionFromLambda()) {
    const BlockDataRegion *BR = cast<BlockCall>(Call).getBlockRegion();
    assert(BR && "If we have the block definition we should have its region");
    AnalysisDeclContext *BlockCtx = AMgr.getAnalysisDeclContext(D);
    ParentOfCallee = BlockCtx->getBlockInvocationContext(CallerSFC,
                                                         cast<BlockDecl>(D),
                                                         BR);
  }

  // This may be NULL, but that's fine.
  const Expr *CallE = Call.getOriginExpr();

  // Construct a new stack frame for the callee.
  AnalysisDeclContext *CalleeADC = AMgr.getAnalysisDeclContext(D);
  const StackFrameContext *CalleeSFC =
      CalleeADC->getStackFrame(ParentOfCallee, CallE, currBldrCtx->getBlock(),
                               currBldrCtx->blockCount(), currStmtIdx);

  CallEnter Loc(CallE, CalleeSFC, CurLC);

  // Construct a new state which contains the mapping from actual to
  // formal arguments.
  State = State->enterStackFrame(Call, CalleeSFC);

  bool isNew;
  if (ExplodedNode *N = G.getNode(Loc, State, false, &isNew)) {
    N->addPredecessor(Pred, G);
    if (isNew)
      Engine.getWorkList()->enqueue(N);
  }

  // If we decided to inline the call, the successor has been manually
  // added onto the work list so remove it from the node builder.
  Bldr.takeNodes(Pred);

  NumInlinedCalls++;
  Engine.FunctionSummaries->bumpNumTimesInlined(D);

  // Mark the decl as visited.
  if (VisitedCallees)
    VisitedCallees->insert(D);

  return true;
}

static ProgramStateRef getInlineFailedState(ProgramStateRef State,
                                            const Stmt *CallE) {
  const void *ReplayState = State->get<ReplayWithoutInlining>();
  if (!ReplayState)
    return nullptr;

  assert(ReplayState == CallE && "Backtracked to the wrong call.");
  (void)CallE;

  return State->remove<ReplayWithoutInlining>();
}
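
// Note: the ReplayWithoutInlining trait consumed above is set when the
// engine decides to backtrack and re-evaluate a previously inlined call
// conservatively; finding it here therefore means "this exact call site
// already failed once with inlining, do not try again".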

void ExprEngine::VisitCallExpr(const CallExpr *CE, ExplodedNode *Pred,
                               ExplodedNodeSet &dst) {
  // Perform the previsit of the CallExpr.
  ExplodedNodeSet dstPreVisit;
  getCheckerManager().runCheckersForPreStmt(dstPreVisit, Pred, CE, *this);

  // Get the call in its initial state. We use this as a template to perform
  // all the checks.
  CallEventManager &CEMgr = getStateManager().getCallEventManager();
  CallEventRef<> CallTemplate
    = CEMgr.getSimpleCall(CE, Pred->getState(), Pred->getLocationContext());

  // Evaluate the function call.  We try each of the checkers
  // to see if they can evaluate the function call.
  ExplodedNodeSet dstCallEvaluated;
  for (ExplodedNodeSet::iterator I = dstPreVisit.begin(), E = dstPreVisit.end();
       I != E; ++I) {
    evalCall(dstCallEvaluated, *I, *CallTemplate);
  }

  // Finally, perform the post-condition check of the CallExpr and store
  // the created nodes in 'Dst'.
  // Note that if the call was inlined, dstCallEvaluated will be empty.
  // The post-CallExpr check will occur in processCallExit.
  getCheckerManager().runCheckersForPostStmt(dst, dstCallEvaluated, CE,
                                             *this);
}

ProgramStateRef ExprEngine::finishArgumentConstruction(ProgramStateRef State,
                                                       const CallEvent &Call) {
  const Expr *E = Call.getOriginExpr();
  // FIXME: Constructors to placement arguments of operator new
  // are not supported yet.
  if (!E || isa<CXXNewExpr>(E))
    return State;

  const LocationContext *LC = Call.getLocationContext();
  for (unsigned CallI = 0, CallN = Call.getNumArgs(); CallI != CallN; ++CallI) {
    unsigned I = Call.getASTArgumentIndex(CallI);
    if (Optional<SVal> V =
            getObjectUnderConstruction(State, {E, I}, LC)) {
      SVal VV = *V;
      (void)VV;
      assert(cast<VarRegion>(VV.castAs<loc::MemRegionVal>().getRegion())
                 ->getStackFrame()->getParent()
                 ->getStackFrame() == LC->getStackFrame());
      State = finishObjectConstruction(State, {E, I}, LC);
    }
  }

  return State;
}

void ExprEngine::finishArgumentConstruction(ExplodedNodeSet &Dst,
                                            ExplodedNode *Pred,
                                            const CallEvent &Call) {
  ProgramStateRef State = Pred->getState();
  ProgramStateRef CleanedState = finishArgumentConstruction(State, Call);
  if (CleanedState == State) {
    Dst.insert(Pred);
    return;
  }

  const Expr *E = Call.getOriginExpr();
  const LocationContext *LC = Call.getLocationContext();
  NodeBuilder B(Pred, Dst, *currBldrCtx);
  static SimpleProgramPointTag Tag("ExprEngine",
                                   "Finish argument construction");
  PreStmt PP(E, LC, &Tag);
  B.generateNode(PP, CleanedState, Pred);
}
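
// Example of what gets retired here (sketch): for a call "consume(Obj())"
// whose parameter is taken by value, the argument object is constructed
// directly into the callee's parameter region before the call, and is
// tracked as an object under construction; once the call itself has been
// evaluated, the {E, I} entry is cleared via finishObjectConstruction() so
// it does not linger in later states.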

void ExprEngine::evalCall(ExplodedNodeSet &Dst, ExplodedNode *Pred,
                          const CallEvent &Call) {
  // WARNING: At this time, the state attached to 'Call' may be older than the
  // state in 'Pred'. This is a minor optimization since CheckerManager will
  // use an updated CallEvent instance when calling checkers, but if 'Call' is
  // ever used directly in this function all callers should be updated to pass
  // the most recent state. (It is probably not worth doing the work here since
  // for some callers this will not be necessary.)

  // Run any pre-call checks using the generic call interface.
  ExplodedNodeSet dstPreVisit;
  getCheckerManager().runCheckersForPreCall(dstPreVisit, Pred,
                                            Call, *this);

  // Actually evaluate the function call.  We try each of the checkers
  // to see if they can evaluate the function call, and get a callback at
  // defaultEvalCall if all of them fail.
  ExplodedNodeSet dstCallEvaluated;
  getCheckerManager().runCheckersForEvalCall(dstCallEvaluated, dstPreVisit,
                                             Call, *this, EvalCallOptions());

  // If there were other constructors called for object-type arguments
  // of this call, clean them up.
  ExplodedNodeSet dstArgumentCleanup;
  for (ExplodedNode *I : dstCallEvaluated)
    finishArgumentConstruction(dstArgumentCleanup, I, Call);

  ExplodedNodeSet dstPostCall;
  getCheckerManager().runCheckersForPostCall(dstPostCall, dstArgumentCleanup,
                                             Call, *this);

  // Escaping symbols conjured while invalidating the regions above.
  // Note that, for inlined calls the nodes were put back into the worklist,
  // so we can assume that every node belongs to a conservative call at this
  // point.

  // Run pointerEscape callback with the newly conjured symbols.
  SmallVector<std::pair<SVal, SVal>, 8> Escaped;
  for (ExplodedNode *I : dstPostCall) {
    NodeBuilder B(I, Dst, *currBldrCtx);
    ProgramStateRef State = I->getState();
    Escaped.clear();
    {
      unsigned Arg = -1;
      for (const ParmVarDecl *PVD : Call.parameters()) {
        ++Arg;
        QualType ParamTy = PVD->getType();
        if (ParamTy.isNull() ||
            (!ParamTy->isPointerType() && !ParamTy->isReferenceType()))
          continue;
        QualType Pointee = ParamTy->getPointeeType();
        if (Pointee.isConstQualified() || Pointee->isVoidType())
          continue;
        if (const MemRegion *MR = Call.getArgSVal(Arg).getAsRegion())
          Escaped.emplace_back(loc::MemRegionVal(MR), State->getSVal(MR, Pointee));
      }
    }

    State = processPointerEscapedOnBind(State, Escaped, I->getLocationContext(),
                                        PSK_EscapeOutParameters, &Call);

    if (State == I->getState())
      Dst.insert(I);
    else
      B.generateNode(I->getLocation(), State, I);
  }
}

ProgramStateRef ExprEngine::bindReturnValue(const CallEvent &Call,
                                            const LocationContext *LCtx,
                                            ProgramStateRef State) {
  const Expr *E = Call.getOriginExpr();
  if (!E)
    return State;

  // Some method families have known return values.
  if (const ObjCMethodCall *Msg = dyn_cast<ObjCMethodCall>(&Call)) {
    switch (Msg->getMethodFamily()) {
    default:
      break;
    case OMF_autorelease:
    case OMF_retain:
    case OMF_self: {
      // These methods return their receivers.
      return State->BindExpr(E, LCtx, Msg->getReceiverSVal());
    }
    }
  } else if (const CXXConstructorCall *C = dyn_cast<CXXConstructorCall>(&Call)){
    SVal ThisV = C->getCXXThisVal();
    ThisV = State->getSVal(ThisV.castAs<Loc>());
    return State->BindExpr(E, LCtx, ThisV);
  }

  SVal R;
  QualType ResultTy = Call.getResultType();
  unsigned Count = currBldrCtx->blockCount();
  if (auto RTC = getCurrentCFGElement().getAs<CFGCXXRecordTypedCall>()) {
    // Conjure a temporary if the function returns an object by value.
    SVal Target;
    assert(RTC->getStmt() == Call.getOriginExpr());
    EvalCallOptions CallOpts; // FIXME: We won't really need those.
    std::tie(State, Target) =
        handleConstructionContext(Call.getOriginExpr(), State, LCtx,
                                  RTC->getConstructionContext(), CallOpts);
    const MemRegion *TargetR = Target.getAsRegion();
    assert(TargetR);
    // Invalidate the region so that it doesn't look uninitialized. If this is
    // a field or element constructor, we do not want to invalidate
    // the whole structure. Pointer escape is meaningless because
    // the structure is a product of conservative evaluation
    // and therefore contains nothing interesting at this point.
    RegionAndSymbolInvalidationTraits ITraits;
    ITraits.setTrait(TargetR,
        RegionAndSymbolInvalidationTraits::TK_DoNotInvalidateSuperRegion);
    State = State->invalidateRegions(TargetR, E, Count, LCtx,
                                     /* CausesPointerEscape=*/false, nullptr,
                                     &Call, &ITraits);

    R = State->getSVal(Target.castAs<Loc>(), E->getType());
  } else {
    // Conjure a symbol if the return value is unknown.

    // See if we need to conjure a heap pointer instead of
    // a regular unknown pointer.
    bool IsHeapPointer = false;
    if (const auto *CNE = dyn_cast<CXXNewExpr>(E))
      if (CNE->getOperatorNew()->isReplaceableGlobalAllocationFunction()) {
        // FIXME: Delegate this to evalCall in MallocChecker?
        IsHeapPointer = true;
      }

    R = IsHeapPointer ? svalBuilder.getConjuredHeapSymbolVal(E, LCtx, Count)
                      : svalBuilder.conjureSymbolVal(nullptr, E, LCtx, ResultTy,
                                                     Count);
  }
  return State->BindExpr(E, LCtx, R);
}
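
// The three outcomes above for a conservatively-evaluated call, sketched
// with hypothetical callees:
//
//   std::string s = make();  // returns a C++ object by value: bind the
//                            // value conjured in the construction target
//   int *p = new int;        // replaceable operator new: conjure a heap
//                            // symbol so the pointer is tracked as a
//                            // heap allocation
//   int n = compute();       // otherwise: conjure a plain symbol of the
//                            // call's result type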

// Conservatively evaluate call by invalidating regions and binding
// a conjured return value.
void ExprEngine::conservativeEvalCall(const CallEvent &Call, NodeBuilder &Bldr,
                                      ExplodedNode *Pred, ProgramStateRef State) {
  State = Call.invalidateRegions(currBldrCtx->blockCount(), State);
  State = bindReturnValue(Call, Pred->getLocationContext(), State);

  // And make the result node.
  Bldr.generateNode(Call.getProgramPoint(), State, Pred);
}

ExprEngine::CallInlinePolicy
ExprEngine::mayInlineCallKind(const CallEvent &Call, const ExplodedNode *Pred,
                              AnalyzerOptions &Opts,
                              const EvalCallOptions &CallOpts) {
  const LocationContext *CurLC = Pred->getLocationContext();
  const StackFrameContext *CallerSFC = CurLC->getStackFrame();
  switch (Call.getKind()) {
  case CE_Function:
  case CE_Block:
    break;
  case CE_CXXMember:
  case CE_CXXMemberOperator:
    if (!Opts.mayInlineCXXMemberFunction(CIMK_MemberFunctions))
      return CIP_DisallowedAlways;
    break;
  case CE_CXXConstructor: {
    if (!Opts.mayInlineCXXMemberFunction(CIMK_Constructors))
      return CIP_DisallowedAlways;

    const CXXConstructorCall &Ctor = cast<CXXConstructorCall>(Call);

    const CXXConstructExpr *CtorExpr = Ctor.getOriginExpr();

    auto CCE = getCurrentCFGElement().getAs<CFGConstructor>();
    const ConstructionContext *CC = CCE ? CCE->getConstructionContext()
                                        : nullptr;

    if (llvm::isa_and_nonnull<NewAllocatedObjectConstructionContext>(CC) &&
        !Opts.MayInlineCXXAllocator)
      return CIP_DisallowedOnce;

    // FIXME: We don't handle constructors or destructors for arrays properly.
    // Even once we do, we still need to be careful about implicitly-generated
    // initializers for array fields in default move/copy constructors.
    // We still allow construction into ElementRegion targets when they don't
    // represent array elements.
    if (CallOpts.IsArrayCtorOrDtor)
      return CIP_DisallowedOnce;

    // Inlining constructors requires including initializers in the CFG.
    const AnalysisDeclContext *ADC = CallerSFC->getAnalysisDeclContext();
    assert(ADC->getCFGBuildOptions().AddInitializers && "No CFG initializers");
    (void)ADC;

    // If the destructor is trivial, it's always safe to inline the constructor.
    if (Ctor.getDecl()->getParent()->hasTrivialDestructor())
      break;

    // For other types, only inline constructors if destructor inlining is
    // also enabled.
    if (!Opts.mayInlineCXXMemberFunction(CIMK_Destructors))
      return CIP_DisallowedAlways;

    if (CtorExpr->getConstructionKind() == CXXConstructExpr::CK_Complete) {
      // If we don't handle temporary destructors, we shouldn't inline
      // their constructors.
      if (CallOpts.IsTemporaryCtorOrDtor &&
          !Opts.ShouldIncludeTemporaryDtorsInCFG)
        return CIP_DisallowedOnce;

      // If we did not find the correct this-region, it would be pointless
      // to inline the constructor. Instead we will simply invalidate
      // the fake temporary target.
      if (CallOpts.IsCtorOrDtorWithImproperlyModeledTargetRegion)
        return CIP_DisallowedOnce;

      // If the temporary is lifetime-extended by binding it to a reference-type
      // field within an aggregate, automatic destructors don't work properly.
      if (CallOpts.IsTemporaryLifetimeExtendedViaAggregate)
        return CIP_DisallowedOnce;
    }

    break;
  }
  case CE_CXXInheritedConstructor: {
    // This doesn't really increase the cost of inlining ever, because
    // the stack frame of the inherited constructor is trivial.
    return CIP_Allowed;
  }
  case CE_CXXDestructor: {
    if (!Opts.mayInlineCXXMemberFunction(CIMK_Destructors))
      return CIP_DisallowedAlways;

    // Inlining destructors requires building the CFG correctly.
    const AnalysisDeclContext *ADC = CallerSFC->getAnalysisDeclContext();
    assert(ADC->getCFGBuildOptions().AddImplicitDtors && "No CFG destructors");
    (void)ADC;

    // FIXME: We don't handle constructors or destructors for arrays properly.
    if (CallOpts.IsArrayCtorOrDtor)
      return CIP_DisallowedOnce;

    // Allow disabling temporary destructor inlining with a separate option.
    if (CallOpts.IsTemporaryCtorOrDtor &&
        !Opts.MayInlineCXXTemporaryDtors)
      return CIP_DisallowedOnce;

    // If we did not find the correct this-region, it would be pointless
    // to inline the destructor. Instead we will simply invalidate
    // the fake temporary target.
    if (CallOpts.IsCtorOrDtorWithImproperlyModeledTargetRegion)
      return CIP_DisallowedOnce;
    break;
  }
  case CE_CXXDeallocator:
    LLVM_FALLTHROUGH;
  case CE_CXXAllocator:
    if (Opts.MayInlineCXXAllocator)
      break;
    // Do not inline allocators until we model deallocators.
    // This is unfortunate, but basically necessary for smart pointers and such.
    return CIP_DisallowedAlways;
  case CE_ObjCMessage:
    if (!Opts.MayInlineObjCMethod)
      return CIP_DisallowedAlways;
    if (!(Opts.getIPAMode() == IPAK_DynamicDispatch ||
          Opts.getIPAMode() == IPAK_DynamicDispatchBifurcate))
      return CIP_DisallowedAlways;
    break;
  }

  return CIP_Allowed;
}

/// Returns true if the given C++ class contains a member with the given name.
static bool hasMember(const ASTContext &Ctx, const CXXRecordDecl *RD,
                      StringRef Name) {
  const IdentifierInfo &II = Ctx.Idents.get(Name);
  DeclarationName DeclName = Ctx.DeclarationNames.getIdentifier(&II);
  if (!RD->lookup(DeclName).empty())
    return true;

  CXXBasePaths Paths(false, false, false);
  if (RD->lookupInBases(
          [DeclName](const CXXBaseSpecifier *Specifier, CXXBasePath &Path) {
            return CXXRecordDecl::FindOrdinaryMember(Specifier, Path, DeclName);
          },
          Paths))
    return true;

  return false;
}

/// Returns true if the given C++ class is a container or iterator.
///
/// Our heuristic for this is whether it contains a method named 'begin()' or a
/// nested type named 'iterator' or 'iterator_category'.
static bool isContainerClass(const ASTContext &Ctx, const CXXRecordDecl *RD) {
  return hasMember(Ctx, RD, "begin") ||
         hasMember(Ctx, RD, "iterator") ||
         hasMember(Ctx, RD, "iterator_category");
}

/// Returns true if the given function refers to a method of a C++ container
/// or iterator.
///
/// We generally do a poor job modeling most containers right now, and might
/// prefer not to inline their methods.
static bool isContainerMethod(const ASTContext &Ctx,
                              const FunctionDecl *FD) {
  if (const CXXMethodDecl *MD = dyn_cast<CXXMethodDecl>(FD))
    return isContainerClass(Ctx, MD->getParent());
  return false;
}
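
// Under this heuristic a class like std::vector<int>, which has begin() and
// a nested iterator type, counts as a container; with
// MayInlineCXXContainerMethods disabled, methods of such classes declared
// outside the main code file are treated as opaque rather than inlined (see
// mayInlineDecl() below).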

/// Returns true if the given function is the destructor of a class named
/// "shared_ptr".
static bool isCXXSharedPtrDtor(const FunctionDecl *FD) {
  const CXXDestructorDecl *Dtor = dyn_cast<CXXDestructorDecl>(FD);
  if (!Dtor)
    return false;

  const CXXRecordDecl *RD = Dtor->getParent();
  if (const IdentifierInfo *II = RD->getDeclName().getAsIdentifierInfo())
    if (II->isStr("shared_ptr"))
        return true;

  return false;
}

/// Returns true if the function in \p CalleeADC may be inlined in general.
///
/// This checks static properties of the function, such as its signature and
/// CFG, to determine whether the analyzer should ever consider inlining it,
/// in any context.
bool ExprEngine::mayInlineDecl(AnalysisDeclContext *CalleeADC) const {
  AnalyzerOptions &Opts = AMgr.getAnalyzerOptions();
  // FIXME: Do not inline variadic calls.
  if (CallEvent::isVariadic(CalleeADC->getDecl()))
    return false;

  // Check certain C++-related inlining policies.
  ASTContext &Ctx = CalleeADC->getASTContext();
  if (Ctx.getLangOpts().CPlusPlus) {
    if (const FunctionDecl *FD = dyn_cast<FunctionDecl>(CalleeADC->getDecl())) {
      // Conditionally control the inlining of template functions.
      if (!Opts.MayInlineTemplateFunctions)
        if (FD->getTemplatedKind() != FunctionDecl::TK_NonTemplate)
          return false;

      // Conditionally control the inlining of C++ standard library functions.
      if (!Opts.MayInlineCXXStandardLibrary)
        if (Ctx.getSourceManager().isInSystemHeader(FD->getLocation()))
          if (AnalysisDeclContext::isInStdNamespace(FD))
            return false;

      // Conditionally control the inlining of methods on objects that look
      // like C++ containers.
      if (!Opts.MayInlineCXXContainerMethods)
        if (!AMgr.isInCodeFile(FD->getLocation()))
          if (isContainerMethod(Ctx, FD))
            return false;

      // Conditionally control the inlining of the destructor of C++ shared_ptr.
      // We don't currently do a good job modeling shared_ptr because we can't
      // see the reference count, so treating it as opaque is probably the best
      // idea.
      if (!Opts.MayInlineCXXSharedPtrDtor)
        if (isCXXSharedPtrDtor(FD))
          return false;
    }
  }

  // It is possible that the CFG cannot be constructed.
  // Be safe, and check if the CalleeCFG is valid.
  const CFG *CalleeCFG = CalleeADC->getCFG();
  if (!CalleeCFG)
    return false;

  // Do not inline large functions.
  if (isHuge(CalleeADC))
    return false;

  // It is possible that the live variables analysis cannot be
  // run.  If so, bail out.
  if (!CalleeADC->getAnalysis<RelaxedLiveVariables>())
    return false;

  return true;
}

bool ExprEngine::shouldInlineCall(const CallEvent &Call, const Decl *D,
                                  const ExplodedNode *Pred,
                                  const EvalCallOptions &CallOpts) {
  if (!D)
    return false;

  AnalysisManager &AMgr = getAnalysisManager();
  AnalyzerOptions &Opts = AMgr.options;
  AnalysisDeclContextManager &ADCMgr = AMgr.getAnalysisDeclContextManager();
  AnalysisDeclContext *CalleeADC = ADCMgr.getContext(D);

  // The auto-synthesized bodies are essential to inline as they are
  // usually small and commonly used. Note: we should do this check early on to
  // ensure we always inline these calls.
  if (CalleeADC->isBodyAutosynthesized())
    return true;

  if (!AMgr.shouldInlineCall())
    return false;

  // Check if this function has been marked as non-inlinable.
  Optional<bool> MayInline = Engine.FunctionSummaries->mayInline(D);
  if (MayInline.hasValue()) {
    if (!MayInline.getValue())
      return false;

  } else {
    // We haven't actually checked the static properties of this function yet.
    // Do that now, and record our decision in the function summaries.
    if (mayInlineDecl(CalleeADC)) {
      Engine.FunctionSummaries->markMayInline(D);
    } else {
      Engine.FunctionSummaries->markShouldNotInline(D);
      return false;
    }
  }

  // Check if we should inline a call based on its kind.
  // FIXME: this checks both static and dynamic properties of the call, which
  // means we're redoing a bit of work that could be cached in the function
  // summary.
  CallInlinePolicy CIP = mayInlineCallKind(Call, Pred, Opts, CallOpts);
  if (CIP != CIP_Allowed) {
    if (CIP == CIP_DisallowedAlways) {
      assert(!MayInline.hasValue() || MayInline.getValue());
      Engine.FunctionSummaries->markShouldNotInline(D);
    }
    return false;
  }

  // Do not inline if recursive or we've reached max stack frame count.
  bool IsRecursive = false;
  unsigned StackDepth = 0;
  examineStackFrames(D, Pred->getLocationContext(), IsRecursive, StackDepth);
  if ((StackDepth >= Opts.InlineMaxStackDepth) &&
      (!isSmall(CalleeADC) || IsRecursive))
    return false;

  // Do not inline large functions too many times.
  if ((Engine.FunctionSummaries->getNumTimesInlined(D) >
       Opts.MaxTimesInlineLarge) &&
      isLarge(CalleeADC)) {
    NumReachedInlineCountMax++;
    return false;
  }

  if (HowToInline == Inline_Minimal && (!isSmall(CalleeADC) || IsRecursive))
    return false;

  return true;
}

static bool isTrivialObjectAssignment(const CallEvent &Call) {
  const CXXInstanceCall *ICall = dyn_cast<CXXInstanceCall>(&Call);
  if (!ICall)
    return false;

  const CXXMethodDecl *MD = dyn_cast_or_null<CXXMethodDecl>(ICall->getDecl());
  if (!MD)
    return false;
  if (!(MD->isCopyAssignmentOperator() || MD->isMoveAssignmentOperator()))
    return false;

  return MD->isTrivial();
}
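
// E.g. for "a = b" where both operands are a plain aggregate such as
// "struct P { int x, y; };", the implicitly-defined copy-assignment operator
// is trivial, and defaultEvalCall() below models the call as a simple copy
// via performTrivialCopy() instead of inlining an operator= body.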

void ExprEngine::defaultEvalCall(NodeBuilder &Bldr, ExplodedNode *Pred,
                                 const CallEvent &CallTemplate,
                                 const EvalCallOptions &CallOpts) {
  // Make sure we have the most recent state attached to the call.
  ProgramStateRef State = Pred->getState();
  CallEventRef<> Call = CallTemplate.cloneWithState(State);

  // Special-case trivial assignment operators.
  if (isTrivialObjectAssignment(*Call)) {
    performTrivialCopy(Bldr, Pred, *Call);
    return;
  }

  // Try to inline the call.
  // The origin expression here is just used as a kind of checksum;
  // this should still be safe even for CallEvents that don't come from exprs.
  const Expr *E = Call->getOriginExpr();

  ProgramStateRef InlinedFailedState = getInlineFailedState(State, E);
  if (InlinedFailedState) {
    // If we already tried once and failed, make sure we don't retry later.
    State = InlinedFailedState;
  } else {
    RuntimeDefinition RD = Call->getRuntimeDefinition();
    const Decl *D = RD.getDecl();
    if (shouldInlineCall(*Call, D, Pred, CallOpts)) {
      if (RD.mayHaveOtherDefinitions()) {
        AnalyzerOptions &Options = getAnalysisManager().options;

        // Explore with and without inlining the call.
        if (Options.getIPAMode() == IPAK_DynamicDispatchBifurcate) {
          BifurcateCall(RD.getDispatchRegion(), *Call, D, Bldr, Pred);
          return;
        }

        // Don't inline if we're not in any dynamic dispatch mode.
        if (Options.getIPAMode() != IPAK_DynamicDispatch) {
          conservativeEvalCall(*Call, Bldr, Pred, State);
          return;
        }
      }

      // We are not bifurcating and we do have a Decl, so just inline.
      if (inlineCall(*Call, D, Bldr, Pred, State))
        return;
    }
  }

  // If we can't inline it, handle the return value and invalidate the regions.
  conservativeEvalCall(*Call, Bldr, Pred, State);
}

void ExprEngine::BifurcateCall(const MemRegion *BifurReg,
                               const CallEvent &Call, const Decl *D,
                               NodeBuilder &Bldr, ExplodedNode *Pred) {
  assert(BifurReg);
  BifurReg = BifurReg->StripCasts();

  // Check if we've performed the split already - note, we only want
  // to split the path once per memory region.
  ProgramStateRef State = Pred->getState();
  const unsigned *BState =
                        State->get<DynamicDispatchBifurcationMap>(BifurReg);
  if (BState) {
    // If we are on the "inline path", keep inlining if possible.
    if (*BState == DynamicDispatchModeInlined)
      if (inlineCall(Call, D, Bldr, Pred, State))
        return;
    // If inlining failed, or we are on the path where we assume we
    // don't have enough info about the receiver to inline, conjure the
    // return value and invalidate the regions.
    conservativeEvalCall(Call, Bldr, Pred, State);
    return;
  }

  // If we got here, this is the first time we process a message to this
  // region, so split the path.
  ProgramStateRef IState =
      State->set<DynamicDispatchBifurcationMap>(BifurReg,
                                               DynamicDispatchModeInlined);
  inlineCall(Call, D, Bldr, Pred, IState);

  ProgramStateRef NoIState =
      State->set<DynamicDispatchBifurcationMap>(BifurReg,
                                               DynamicDispatchModeConservative);
  conservativeEvalCall(Call, Bldr, Pred, NoIState);

  NumOfDynamicDispatchPathSplits++;
}

void ExprEngine::VisitReturnStmt(const ReturnStmt *RS, ExplodedNode *Pred,
                                 ExplodedNodeSet &Dst) {
  ExplodedNodeSet dstPreVisit;
  getCheckerManager().runCheckersForPreStmt(dstPreVisit, Pred, RS, *this);

  StmtNodeBuilder B(dstPreVisit, Dst, *currBldrCtx);

  if (RS->getRetValue()) {
    for (ExplodedNodeSet::iterator it = dstPreVisit.begin(),
                                  ei = dstPreVisit.end(); it != ei; ++it) {
      B.generateNode(RS, *it, (*it)->getState());
    }
  }
}