Coverage Report

Created: 2019-07-24 05:18

/Users/buildslave/jenkins/workspace/clang-stage2-coverage-R/llvm/tools/clang/lib/StaticAnalyzer/Core/ExprEngineCallAndReturn.cpp

//=-- ExprEngineCallAndReturn.cpp - Support for call/return -----*- C++ -*-===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
//  This file defines ExprEngine's support for calls and returns.
//
//===----------------------------------------------------------------------===//

#include "clang/StaticAnalyzer/Core/PathSensitive/ExprEngine.h"
#include "PrettyStackTraceLocationContext.h"
#include "clang/AST/CXXInheritance.h"
#include "clang/AST/DeclCXX.h"
#include "clang/Analysis/Analyses/LiveVariables.h"
#include "clang/Analysis/ConstructionContext.h"
#include "clang/StaticAnalyzer/Core/CheckerManager.h"
#include "clang/StaticAnalyzer/Core/PathSensitive/CallEvent.h"
#include "llvm/ADT/SmallSet.h"
#include "llvm/ADT/Statistic.h"
#include "llvm/Support/SaveAndRestore.h"

using namespace clang;
using namespace ento;

#define DEBUG_TYPE "ExprEngine"

STATISTIC(NumOfDynamicDispatchPathSplits,
  "The # of times we split the path due to imprecise dynamic dispatch info");

STATISTIC(NumInlinedCalls,
  "The # of times we inlined a call");

STATISTIC(NumReachedInlineCountMax,
  "The # of times we reached inline count maximum");

void ExprEngine::processCallEnter(NodeBuilderContext& BC, CallEnter CE,
                                  ExplodedNode *Pred) {
  // Get the entry block in the CFG of the callee.
  const StackFrameContext *calleeCtx = CE.getCalleeContext();
  PrettyStackTraceLocationContext CrashInfo(calleeCtx);
  const CFGBlock *Entry = CE.getEntry();

  // Validate the CFG.
  assert(Entry->empty());
  assert(Entry->succ_size() == 1);

  // Get the solitary successor.
  const CFGBlock *Succ = *(Entry->succ_begin());

  // Construct an edge representing the starting location in the callee.
  BlockEdge Loc(Entry, Succ, calleeCtx);

  ProgramStateRef state = Pred->getState();

  // Construct a new node, notify checkers that analysis of the function has
  // begun, and add the resultant nodes to the worklist.
  bool isNew;
  ExplodedNode *Node = G.getNode(Loc, state, false, &isNew);
  Node->addPredecessor(Pred, G);
  if (isNew) {
    ExplodedNodeSet DstBegin;
    processBeginOfFunction(BC, Node, DstBegin, Loc);
    Engine.enqueue(DstBegin);
  }
}

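// A minimal sketch (not part of this file; the checker name is hypothetical)
// of how a checker observes the function-entry node enqueued above, via the
// check::BeginFunction callback that processBeginOfFunction() dispatches to:
//
//   class FunctionEntryChecker : public Checker<check::BeginFunction> {
//   public:
//     void checkBeginFunction(CheckerContext &C) const {
//       // Runs every time the engine starts analyzing a function body
//       // (here, an inlined callee entered through a CallEnter node).
//     }
//   };
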
// Find the last statement on the path to the exploded node and the
// corresponding Block.
static std::pair<const Stmt*,
                 const CFGBlock*> getLastStmt(const ExplodedNode *Node) {
  const Stmt *S = nullptr;
  const CFGBlock *Blk = nullptr;
  const StackFrameContext *SF = Node->getStackFrame();

  // Back up through the ExplodedGraph until we reach a statement node in this
  // stack frame.
  while (Node) {
    const ProgramPoint &PP = Node->getLocation();

    if (PP.getStackFrame() == SF) {
      if (Optional<StmtPoint> SP = PP.getAs<StmtPoint>()) {
        S = SP->getStmt();
        break;
      } else if (Optional<CallExitEnd> CEE = PP.getAs<CallExitEnd>()) {
        S = CEE->getCalleeContext()->getCallSite();
        if (S)
          break;

        // If there is no statement, this is an implicitly-generated call.
        // We'll walk backwards over it and then continue the loop to find
        // an actual statement.
        Optional<CallEnter> CE;
        do {
          Node = Node->getFirstPred();
          CE = Node->getLocationAs<CallEnter>();
        } while (!CE ||
                 CE->getCalleeContext() != CEE->getCalleeContext());

        // Continue searching the graph.
      } else if (Optional<BlockEdge> BE = PP.getAs<BlockEdge>()) {
        Blk = BE->getSrc();
      }
    } else if (Optional<CallEnter> CE = PP.getAs<CallEnter>()) {
      // If we reached the CallEnter for this function, it has no statements.
      if (CE->getCalleeContext() == SF)
        break;
    }

    if (Node->pred_empty())
      return std::make_pair(nullptr, nullptr);

    Node = *Node->pred_begin();
  }

  return std::make_pair(S, Blk);
}

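// An illustrative example (hypothetical code, for exposition) of an
// implicitly-generated call handled by the loop above: the destructor call at
// the end of the scope has no call-site statement, so the walk skips over the
// entire inlined destructor and keeps searching for a real statement:
//
//   struct Guard { ~Guard(); };
//   void test() {
//     Guard g;
//   }  // ~Guard() runs here; its CallExitEnd has a null call site
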
/// Adjusts a return value when the called function's return type does not
/// match the caller's expression type. This can happen when a dynamic call
/// is devirtualized, and the overriding method has a covariant (more specific)
/// return type than the parent's method. For C++ objects, this means we need
/// to add base casts.
static SVal adjustReturnValue(SVal V, QualType ExpectedTy, QualType ActualTy,
                              StoreManager &StoreMgr) {
  // For now, the only adjustments we handle apply only to locations.
  if (!V.getAs<Loc>())
    return V;

  // If the types already match, don't do any unnecessary work.
  ExpectedTy = ExpectedTy.getCanonicalType();
  ActualTy = ActualTy.getCanonicalType();
  if (ExpectedTy == ActualTy)
    return V;

  // No adjustment is needed between Objective-C pointer types.
  if (ExpectedTy->isObjCObjectPointerType() &&
      ActualTy->isObjCObjectPointerType())
    return V;

  // C++ object pointers may need "derived-to-base" casts.
  const CXXRecordDecl *ExpectedClass = ExpectedTy->getPointeeCXXRecordDecl();
  const CXXRecordDecl *ActualClass = ActualTy->getPointeeCXXRecordDecl();
  if (ExpectedClass && ActualClass) {
    CXXBasePaths Paths(/*FindAmbiguities=*/true, /*RecordPaths=*/true,
                       /*DetectVirtual=*/false);
    if (ActualClass->isDerivedFrom(ExpectedClass, Paths) &&
        !Paths.isAmbiguous(ActualTy->getCanonicalTypeUnqualified())) {
      return StoreMgr.evalDerivedToBase(V, Paths.front());
    }
  }

  // Unfortunately, Objective-C does not enforce that overridden methods have
  // covariant return types, so we can't assert that that never happens.
  // Be safe and return UnknownVal().
  return UnknownVal();
}

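// An illustrative example (hypothetical types, for exposition) of a
// devirtualized call where the adjustment above applies. If the analyzer
// knows 'b' points to a Derived, it may inline Derived::clone(), whose value
// is a Derived* that must be cast back to Base* to match the static type of
// the caller's expression:
//
//   struct Base            { virtual Base *clone(); };
//   struct Derived : Base  { Derived *clone() override; };  // covariant
//
//   Base *f(Base *b) { return b->clone(); }  // inlined body returns Derived*
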
void ExprEngine::removeDeadOnEndOfFunction(NodeBuilderContext& BC,
                                           ExplodedNode *Pred,
                                           ExplodedNodeSet &Dst) {
  // Find the last statement in the function and the corresponding basic block.
  const Stmt *LastSt = nullptr;
  const CFGBlock *Blk = nullptr;
  std::tie(LastSt, Blk) = getLastStmt(Pred);
  if (!Blk || !LastSt) {
    Dst.Add(Pred);
    return;
  }

  // Here, we destroy the current location context. We use the current
  // function's entire body as a diagnostic statement, with which the program
  // point will be associated. However, we only want to use LastStmt as a
  // reference for what to clean up if it's a ReturnStmt; otherwise, everything
  // is dead.
  SaveAndRestore<const NodeBuilderContext *> NodeContextRAII(currBldrCtx, &BC);
  const LocationContext *LCtx = Pred->getLocationContext();
  removeDead(Pred, Dst, dyn_cast<ReturnStmt>(LastSt), LCtx,
             LCtx->getAnalysisDeclContext()->getBody(),
             ProgramPoint::PostStmtPurgeDeadSymbolsKind);
}

static bool wasDifferentDeclUsedForInlining(CallEventRef<> Call,
    const StackFrameContext *calleeCtx) {
  const Decl *RuntimeCallee = calleeCtx->getDecl();
  const Decl *StaticDecl = Call->getDecl();
  assert(RuntimeCallee);
  if (!StaticDecl)
    return true;
  return RuntimeCallee->getCanonicalDecl() != StaticDecl->getCanonicalDecl();
}

/// The call exit is simulated with a sequence of nodes, which occur between
/// CallExitBegin and CallExitEnd. The following operations occur between the
/// two program points:
/// 1. CallExitBegin (triggers the start of call exit sequence)
/// 2. Bind the return value
/// 3. Run remove-dead-bindings to clean up the dead symbols from the callee.
/// 4. CallExitEnd (switch to the caller context)
/// 5. PostStmt<CallExpr>
void ExprEngine::processCallExit(ExplodedNode *CEBNode) {
  // Step 1: CEBNode was generated before the call.
  PrettyStackTraceLocationContext CrashInfo(CEBNode->getLocationContext());
  const StackFrameContext *calleeCtx = CEBNode->getStackFrame();

  // The parent context might not be a stack frame, so make sure we
  // look up the first enclosing stack frame.
  const StackFrameContext *callerCtx =
    calleeCtx->getParent()->getStackFrame();

  const Stmt *CE = calleeCtx->getCallSite();
  ProgramStateRef state = CEBNode->getState();
  // Find the last statement in the function and the corresponding basic block.
  const Stmt *LastSt = nullptr;
  const CFGBlock *Blk = nullptr;
  std::tie(LastSt, Blk) = getLastStmt(CEBNode);

  // Generate a CallEvent /before/ cleaning the state, so that we can get the
  // correct value for 'this' (if necessary).
  CallEventManager &CEMgr = getStateManager().getCallEventManager();
  CallEventRef<> Call = CEMgr.getCaller(calleeCtx, state);

  // Step 2: generate node with bound return value: CEBNode -> BindedRetNode.

  // If the callee returns an expression, bind its value to CallExpr.
  if (CE) {
    if (const ReturnStmt *RS = dyn_cast_or_null<ReturnStmt>(LastSt)) {
      const LocationContext *LCtx = CEBNode->getLocationContext();
      SVal V = state->getSVal(RS, LCtx);

      // Ensure that the return type matches the type of the returned Expr.
      if (wasDifferentDeclUsedForInlining(Call, calleeCtx)) {
        QualType ReturnedTy =
          CallEvent::getDeclaredResultType(calleeCtx->getDecl());
        if (!ReturnedTy.isNull()) {
          if (const Expr *Ex = dyn_cast<Expr>(CE)) {
            V = adjustReturnValue(V, Ex->getType(), ReturnedTy,
                                  getStoreManager());
          }
        }
      }

      state = state->BindExpr(CE, callerCtx, V);
    }

    // Bind the constructed object value to CXXConstructExpr.
    if (const CXXConstructExpr *CCE = dyn_cast<CXXConstructExpr>(CE)) {
      loc::MemRegionVal This =
        svalBuilder.getCXXThis(CCE->getConstructor()->getParent(), calleeCtx);
      SVal ThisV = state->getSVal(This);
      ThisV = state->getSVal(ThisV.castAs<Loc>());
      state = state->BindExpr(CCE, callerCtx, ThisV);
    }

    if (const auto *CNE = dyn_cast<CXXNewExpr>(CE)) {
      // We are currently evaluating a CXXNewAllocator CFGElement. It takes a
      // while to reach the actual CXXNewExpr element from here, so keep the
      // region for later use.
      // Additionally cast the return value of the inlined operator new
      // (which is of type 'void *') to the correct object type.
      SVal AllocV = state->getSVal(CNE, callerCtx);
      AllocV = svalBuilder.evalCast(
          AllocV, CNE->getType(),
          getContext().getPointerType(getContext().VoidTy));

      state = addObjectUnderConstruction(state, CNE, calleeCtx->getParent(),
                                         AllocV);
    }
  }

  // Step 3: BindedRetNode -> CleanedNodes
  // If we can find a statement and a block in the inlined function, run remove
  // dead bindings before returning from the call. This is important to ensure
  // that we report issues such as leaks in the stack contexts in which
  // they occurred.
  ExplodedNodeSet CleanedNodes;
  if (LastSt && Blk && AMgr.options.AnalysisPurgeOpt != PurgeNone) {
    static SimpleProgramPointTag retValBind("ExprEngine", "Bind Return Value");
    PostStmt Loc(LastSt, calleeCtx, &retValBind);
    bool isNew;
    ExplodedNode *BindedRetNode = G.getNode(Loc, state, false, &isNew);
    BindedRetNode->addPredecessor(CEBNode, G);
    if (!isNew)
      return;

    NodeBuilderContext Ctx(getCoreEngine(), Blk, BindedRetNode);
    currBldrCtx = &Ctx;
    // Here, we call the Symbol Reaper with 0 statement and callee location
    // context, telling it to clean up everything in the callee's context
    // (and its children). We use the callee's function body as a diagnostic
    // statement, with which the program point will be associated.
    removeDead(BindedRetNode, CleanedNodes, nullptr, calleeCtx,
               calleeCtx->getAnalysisDeclContext()->getBody(),
               ProgramPoint::PostStmtPurgeDeadSymbolsKind);
    currBldrCtx = nullptr;
  } else {
    CleanedNodes.Add(CEBNode);
  }

  for (ExplodedNodeSet::iterator I = CleanedNodes.begin(),
                                 E = CleanedNodes.end(); I != E; ++I) {

    // Step 4: Generate the CallExit and leave the callee's context.
    // CleanedNodes -> CEENode
    CallExitEnd Loc(calleeCtx, callerCtx);
    bool isNew;
    ProgramStateRef CEEState = (*I == CEBNode) ? state : (*I)->getState();

    ExplodedNode *CEENode = G.getNode(Loc, CEEState, false, &isNew);
    CEENode->addPredecessor(*I, G);
    if (!isNew)
      return;

    // Step 5: Perform the post-condition check of the CallExpr and enqueue the
    // result onto the work list.
    // CEENode -> Dst -> WorkList
    NodeBuilderContext Ctx(Engine, calleeCtx->getCallSiteBlock(), CEENode);
    SaveAndRestore<const NodeBuilderContext*> NBCSave(currBldrCtx,
        &Ctx);
    SaveAndRestore<unsigned> CBISave(currStmtIdx, calleeCtx->getIndex());

    CallEventRef<> UpdatedCall = Call.cloneWithState(CEEState);

    ExplodedNodeSet DstPostCall;
    if (const CXXNewExpr *CNE = dyn_cast_or_null<CXXNewExpr>(CE)) {
      ExplodedNodeSet DstPostPostCallCallback;
      getCheckerManager().runCheckersForPostCall(DstPostPostCallCallback,
                                                 CEENode, *UpdatedCall, *this,
                                                 /*wasInlined=*/true);
      for (auto I : DstPostPostCallCallback) {
        getCheckerManager().runCheckersForNewAllocator(
            CNE,
            *getObjectUnderConstruction(I->getState(), CNE,
                                        calleeCtx->getParent()),
            DstPostCall, I, *this,
            /*wasInlined=*/true);
      }
    } else {
      getCheckerManager().runCheckersForPostCall(DstPostCall, CEENode,
                                                 *UpdatedCall, *this,
                                                 /*wasInlined=*/true);
    }
    ExplodedNodeSet Dst;
    if (const ObjCMethodCall *Msg = dyn_cast<ObjCMethodCall>(Call)) {
      getCheckerManager().runCheckersForPostObjCMessage(Dst, DstPostCall, *Msg,
                                                        *this,
                                                        /*wasInlined=*/true);
    } else if (CE &&
               !(isa<CXXNewExpr>(CE) && // Called when visiting CXXNewExpr.
                 AMgr.getAnalyzerOptions().MayInlineCXXAllocator)) {
      getCheckerManager().runCheckersForPostStmt(Dst, DstPostCall, CE,
                                                 *this, /*wasInlined=*/true);
    } else {
      Dst.insert(DstPostCall);
    }

    // Enqueue the next element in the block.
    for (ExplodedNodeSet::iterator PSI = Dst.begin(), PSE = Dst.end();
                                   PSI != PSE; ++PSI) {
      Engine.getWorkList()->enqueue(*PSI, calleeCtx->getCallSiteBlock(),
                                    calleeCtx->getIndex()+1);
    }
  }
}

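// An illustrative walk-through (hypothetical caller and callee, for
// exposition) of the five-step exit sequence modeled above:
//
//   int callee() { return 1; }
//   void caller() { int x = callee(); }
//
// On leaving callee(), the engine emits CallExitBegin (step 1), binds the
// value of 'return 1' to the CallExpr 'callee()' (step 2), purges symbols
// that are dead at the end of the callee (step 3), emits CallExitEnd to
// switch back to caller()'s stack frame (step 4), and finally runs the
// post-statement checks for 'callee()' in the caller's context (step 5).
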
bool ExprEngine::isSmall(AnalysisDeclContext *ADC) const {
  // When there are no branches in the function, it means that there's no
  // exponential complexity introduced by inlining such a function.
  // Such functions also don't trigger various fundamental problems
  // with our inlining mechanism, such as the problem of
  // inlined defensive checks. Hence isLinear().
  const CFG *Cfg = ADC->getCFG();
  return Cfg->isLinear() || Cfg->size() <= AMgr.options.AlwaysInlineSize;
}

bool ExprEngine::isLarge(AnalysisDeclContext *ADC) const {
  const CFG *Cfg = ADC->getCFG();
  return Cfg->size() >= AMgr.options.MinCFGSizeTreatFunctionsAsLarge;
}

bool ExprEngine::isHuge(AnalysisDeclContext *ADC) const {
  const CFG *Cfg = ADC->getCFG();
  return Cfg->getNumBlockIDs() > AMgr.options.MaxInlinableSize;
}

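// An illustrative example (hypothetical functions, for exposition) of the
// size heuristics above: the first function has a linear CFG (no branches),
// so isSmall() is true regardless of AlwaysInlineSize; the second branches,
// so it only counts as small while its CFG stays within AlwaysInlineSize:
//
//   int linear(int x)  { return x + 1; }               // isLinear() -> true
//   int branchy(int x) { return x ? x + 1 : x - 1; }   // has a branch
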
void ExprEngine::examineStackFrames(const Decl *D, const LocationContext *LCtx,
                               bool &IsRecursive, unsigned &StackDepth) {
  IsRecursive = false;
  StackDepth = 0;

  while (LCtx) {
    if (const StackFrameContext *SFC = dyn_cast<StackFrameContext>(LCtx)) {
      const Decl *DI = SFC->getDecl();

      // Mark recursive (and mutually recursive) functions and always count
      // them when measuring the stack depth.
      if (DI == D) {
        IsRecursive = true;
        ++StackDepth;
        LCtx = LCtx->getParent();
        continue;
      }

      // Do not count the small functions when determining the stack depth.
      AnalysisDeclContext *CalleeADC = AMgr.getAnalysisDeclContext(DI);
      if (!isSmall(CalleeADC))
        ++StackDepth;
    }
    LCtx = LCtx->getParent();
  }
}

// The GDM component containing the dynamic dispatch bifurcation info. When
// the exact type of the receiver is not known, we want to explore both paths -
// one on which we do inline it and the other one on which we don't. This is
// done to ensure we do not drop coverage.
// This is the map from the receiver region to a bool, specifying whether we
// consider this region's information precise or not along the given path.
namespace {
  enum DynamicDispatchMode {
    DynamicDispatchModeInlined = 1,
    DynamicDispatchModeConservative
  };
} // end anonymous namespace

REGISTER_MAP_WITH_PROGRAMSTATE(DynamicDispatchBifurcationMap,
                               const MemRegion *, unsigned)

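// An illustrative sketch (hypothetical code, for exposition) of the
// imprecision this map guards against. For a dynamically dispatched call
// (an Objective-C message, or a C++ virtual call as below) on a receiver
// whose dynamic type is not known precisely, a visible definition is only a
// candidate, not a guarantee:
//
//   struct Base { virtual void frob(); };
//   void test(Base *b) {  // *b may really be a subclass of Base
//     b->frob();          // Base::frob() might be overridden at runtime
//   }
//
// Under IPAK_DynamicDispatchBifurcate the engine explores both an "inlined"
// and a "conservative" path for such a call, recording the choice per
// receiver region so that each region is split at most once.
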
bool ExprEngine::inlineCall(const CallEvent &Call, const Decl *D,
                            NodeBuilder &Bldr, ExplodedNode *Pred,
                            ProgramStateRef State) {
  assert(D);

  const LocationContext *CurLC = Pred->getLocationContext();
  const StackFrameContext *CallerSFC = CurLC->getStackFrame();
  const LocationContext *ParentOfCallee = CallerSFC;
  if (Call.getKind() == CE_Block &&
      !cast<BlockCall>(Call).isConversionFromLambda()) {
    const BlockDataRegion *BR = cast<BlockCall>(Call).getBlockRegion();
    assert(BR && "If we have the block definition we should have its region");
    AnalysisDeclContext *BlockCtx = AMgr.getAnalysisDeclContext(D);
    ParentOfCallee = BlockCtx->getBlockInvocationContext(CallerSFC,
                                                         cast<BlockDecl>(D),
                                                         BR);
  }

  // This may be NULL, but that's fine.
  const Expr *CallE = Call.getOriginExpr();

  // Construct a new stack frame for the callee.
  AnalysisDeclContext *CalleeADC = AMgr.getAnalysisDeclContext(D);
  const StackFrameContext *CalleeSFC =
    CalleeADC->getStackFrame(ParentOfCallee, CallE,
                             currBldrCtx->getBlock(),
                             currStmtIdx);

  CallEnter Loc(CallE, CalleeSFC, CurLC);

  // Construct a new state which contains the mapping from actual to
  // formal arguments.
  State = State->enterStackFrame(Call, CalleeSFC);

  bool isNew;
  if (ExplodedNode *N = G.getNode(Loc, State, false, &isNew)) {
    N->addPredecessor(Pred, G);
    if (isNew)
      Engine.getWorkList()->enqueue(N);
  }

  // If we decided to inline the call, the successor has been manually
  // added onto the work list so remove it from the node builder.
  Bldr.takeNodes(Pred);

  NumInlinedCalls++;
  Engine.FunctionSummaries->bumpNumTimesInlined(D);

  // Mark the decl as visited.
  if (VisitedCallees)
    VisitedCallees->insert(D);

  return true;
}

static ProgramStateRef getInlineFailedState(ProgramStateRef State,
                                            const Stmt *CallE) {
  const void *ReplayState = State->get<ReplayWithoutInlining>();
  if (!ReplayState)
    return nullptr;

  assert(ReplayState == CallE && "Backtracked to the wrong call.");
  (void)CallE;

  return State->remove<ReplayWithoutInlining>();
}

void ExprEngine::VisitCallExpr(const CallExpr *CE, ExplodedNode *Pred,
                               ExplodedNodeSet &dst) {
  // Perform the previsit of the CallExpr.
  ExplodedNodeSet dstPreVisit;
  getCheckerManager().runCheckersForPreStmt(dstPreVisit, Pred, CE, *this);

  // Get the call in its initial state. We use this as a template to perform
  // all the checks.
  CallEventManager &CEMgr = getStateManager().getCallEventManager();
  CallEventRef<> CallTemplate
    = CEMgr.getSimpleCall(CE, Pred->getState(), Pred->getLocationContext());

  // Evaluate the function call.  We try each of the checkers
  // to see if they can evaluate the function call.
  ExplodedNodeSet dstCallEvaluated;
  for (ExplodedNodeSet::iterator I = dstPreVisit.begin(), E = dstPreVisit.end();
       I != E; ++I) {
    evalCall(dstCallEvaluated, *I, *CallTemplate);
  }

  // Finally, perform the post-condition check of the CallExpr and store
  // the created nodes in 'Dst'.
  // Note that if the call was inlined, dstCallEvaluated will be empty.
  // The post-CallExpr check will occur in processCallExit.
  getCheckerManager().runCheckersForPostStmt(dst, dstCallEvaluated, CE,
                                             *this);
}

ProgramStateRef ExprEngine::finishArgumentConstruction(ProgramStateRef State,
                                                       const CallEvent &Call) {
  const Expr *E = Call.getOriginExpr();
  // FIXME: Constructors to placement arguments of operator new
  // are not supported yet.
  if (!E || isa<CXXNewExpr>(E))
    return State;

  const LocationContext *LC = Call.getLocationContext();
  for (unsigned CallI = 0, CallN = Call.getNumArgs(); CallI != CallN; ++CallI) {
    unsigned I = Call.getASTArgumentIndex(CallI);
    if (Optional<SVal> V =
            getObjectUnderConstruction(State, {E, I}, LC)) {
      SVal VV = *V;
      (void)VV;
      assert(cast<VarRegion>(VV.castAs<loc::MemRegionVal>().getRegion())
                 ->getStackFrame()->getParent()
                 ->getStackFrame() == LC->getStackFrame());
      State = finishObjectConstruction(State, {E, I}, LC);
    }
  }

  return State;
}

void ExprEngine::finishArgumentConstruction(ExplodedNodeSet &Dst,
                                            ExplodedNode *Pred,
                                            const CallEvent &Call) {
  ProgramStateRef State = Pred->getState();
  ProgramStateRef CleanedState = finishArgumentConstruction(State, Call);
  if (CleanedState == State) {
    Dst.insert(Pred);
    return;
  }

  const Expr *E = Call.getOriginExpr();
  const LocationContext *LC = Call.getLocationContext();
  NodeBuilder B(Pred, Dst, *currBldrCtx);
  static SimpleProgramPointTag Tag("ExprEngine",
                                   "Finish argument construction");
  PreStmt PP(E, LC, &Tag);
  B.generateNode(PP, CleanedState, Pred);
}

void ExprEngine::evalCall(ExplodedNodeSet &Dst, ExplodedNode *Pred,
                          const CallEvent &Call) {
  // WARNING: At this time, the state attached to 'Call' may be older than the
  // state in 'Pred'. This is a minor optimization since CheckerManager will
  // use an updated CallEvent instance when calling checkers, but if 'Call' is
  // ever used directly in this function all callers should be updated to pass
  // the most recent state. (It is probably not worth doing the work here since
  // for some callers this will not be necessary.)

  // Run any pre-call checks using the generic call interface.
  ExplodedNodeSet dstPreVisit;
  getCheckerManager().runCheckersForPreCall(dstPreVisit, Pred,
                                            Call, *this);

  // Actually evaluate the function call.  We try each of the checkers
  // to see if they can evaluate the function call, and get a callback at
  // defaultEvalCall if all of them fail.
  ExplodedNodeSet dstCallEvaluated;
  getCheckerManager().runCheckersForEvalCall(dstCallEvaluated, dstPreVisit,
                                             Call, *this);

  // If there were other constructors called for object-type arguments
  // of this call, clean them up.
  ExplodedNodeSet dstArgumentCleanup;
  for (auto I : dstCallEvaluated)
    finishArgumentConstruction(dstArgumentCleanup, I, Call);

  // Finally, run any post-call checks.
  getCheckerManager().runCheckersForPostCall(Dst, dstArgumentCleanup,
                                             Call, *this);
}

ProgramStateRef ExprEngine::bindReturnValue(const CallEvent &Call,
                                            const LocationContext *LCtx,
                                            ProgramStateRef State) {
  const Expr *E = Call.getOriginExpr();
  if (!E)
    return State;

  // Some method families have known return values.
  if (const ObjCMethodCall *Msg = dyn_cast<ObjCMethodCall>(&Call)) {
    switch (Msg->getMethodFamily()) {
    default:
      break;
    case OMF_autorelease:
    case OMF_retain:
    case OMF_self: {
      // These methods return their receivers.
      return State->BindExpr(E, LCtx, Msg->getReceiverSVal());
    }
    }
  } else if (const CXXConstructorCall *C = dyn_cast<CXXConstructorCall>(&Call)){
    SVal ThisV = C->getCXXThisVal();
    ThisV = State->getSVal(ThisV.castAs<Loc>());
    return State->BindExpr(E, LCtx, ThisV);
  }

  SVal R;
  QualType ResultTy = Call.getResultType();
  unsigned Count = currBldrCtx->blockCount();
  if (auto RTC = getCurrentCFGElement().getAs<CFGCXXRecordTypedCall>()) {
    // Conjure a temporary if the function returns an object by value.
    SVal Target;
    assert(RTC->getStmt() == Call.getOriginExpr());
    EvalCallOptions CallOpts; // FIXME: We won't really need those.
    std::tie(State, Target) =
        prepareForObjectConstruction(Call.getOriginExpr(), State, LCtx,
                                     RTC->getConstructionContext(), CallOpts);
    const MemRegion *TargetR = Target.getAsRegion();
    assert(TargetR);
    // Invalidate the region so that it doesn't look uninitialized. If this is
    // a field or element constructor, we do not want to invalidate
    // the whole structure. Pointer escape is meaningless because
    // the structure is a product of conservative evaluation
    // and therefore contains nothing interesting at this point.
    RegionAndSymbolInvalidationTraits ITraits;
    ITraits.setTrait(TargetR,
        RegionAndSymbolInvalidationTraits::TK_DoNotInvalidateSuperRegion);
    State = State->invalidateRegions(TargetR, E, Count, LCtx,
                                     /* CausesPointerEscape=*/false, nullptr,
                                     &Call, &ITraits);

    R = State->getSVal(Target.castAs<Loc>(), E->getType());
  } else {
    // Conjure a symbol if the return value is unknown.

    // See if we need to conjure a heap pointer instead of
    // a regular unknown pointer.
    bool IsHeapPointer = false;
    if (const auto *CNE = dyn_cast<CXXNewExpr>(E))
      if (CNE->getOperatorNew()->isReplaceableGlobalAllocationFunction()) {
        // FIXME: Delegate this to evalCall in MallocChecker?
        IsHeapPointer = true;
      }

    R = IsHeapPointer ? svalBuilder.getConjuredHeapSymbolVal(E, LCtx, Count)
                      : svalBuilder.conjureSymbolVal(nullptr, E, LCtx, ResultTy,
                                                     Count);
  }
  return State->BindExpr(E, LCtx, R);
}

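// An illustrative example (hypothetical function, for exposition) of the
// conjured-symbol case above: for a call with no known body, a fresh symbol
// is bound to the call expression, so both branches below remain feasible:
//
//   extern int opaque();
//   void test() {
//     int x = opaque();   // x is bound to a conjured symbol
//     if (x == 42) { /* reachable */ } else { /* also reachable */ }
//   }
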
// Conservatively evaluate call by invalidating regions and binding
// a conjured return value.
void ExprEngine::conservativeEvalCall(const CallEvent &Call, NodeBuilder &Bldr,
                                      ExplodedNode *Pred,
                                      ProgramStateRef State) {
  State = Call.invalidateRegions(currBldrCtx->blockCount(), State);
  State = bindReturnValue(Call, Pred->getLocationContext(), State);

  // And make the result node.
  Bldr.generateNode(Call.getProgramPoint(), State, Pred);
}

ExprEngine::CallInlinePolicy
ExprEngine::mayInlineCallKind(const CallEvent &Call, const ExplodedNode *Pred,
                              AnalyzerOptions &Opts,
                              const ExprEngine::EvalCallOptions &CallOpts) {
  const LocationContext *CurLC = Pred->getLocationContext();
  const StackFrameContext *CallerSFC = CurLC->getStackFrame();
  switch (Call.getKind()) {
  case CE_Function:
  case CE_Block:
    break;
  case CE_CXXMember:
  case CE_CXXMemberOperator:
    if (!Opts.mayInlineCXXMemberFunction(CIMK_MemberFunctions))
      return CIP_DisallowedAlways;
    break;
  case CE_CXXConstructor: {
    if (!Opts.mayInlineCXXMemberFunction(CIMK_Constructors))
      return CIP_DisallowedAlways;

    const CXXConstructorCall &Ctor = cast<CXXConstructorCall>(Call);

    const CXXConstructExpr *CtorExpr = Ctor.getOriginExpr();

    auto CCE = getCurrentCFGElement().getAs<CFGConstructor>();
    const ConstructionContext *CC = CCE ? CCE->getConstructionContext()
                                        : nullptr;

    if (CC && isa<NewAllocatedObjectConstructionContext>(CC) &&
        !Opts.MayInlineCXXAllocator)
      return CIP_DisallowedOnce;

    // FIXME: We don't handle constructors or destructors for arrays properly.
    // Even once we do, we still need to be careful about implicitly-generated
    // initializers for array fields in default move/copy constructors.
    // We still allow construction into ElementRegion targets when they don't
    // represent array elements.
    if (CallOpts.IsArrayCtorOrDtor)
      return CIP_DisallowedOnce;

    // Inlining constructors requires including initializers in the CFG.
    const AnalysisDeclContext *ADC = CallerSFC->getAnalysisDeclContext();
    assert(ADC->getCFGBuildOptions().AddInitializers && "No CFG initializers");
    (void)ADC;

    // If the destructor is trivial, it's always safe to inline the constructor.
    if (Ctor.getDecl()->getParent()->hasTrivialDestructor())
      break;

    // For other types, only inline constructors if destructor inlining is
    // also enabled.
    if (!Opts.mayInlineCXXMemberFunction(CIMK_Destructors))
      return CIP_DisallowedAlways;

    if (CtorExpr->getConstructionKind() == CXXConstructExpr::CK_Complete) {
      // If we don't handle temporary destructors, we shouldn't inline
      // their constructors.
      if (CallOpts.IsTemporaryCtorOrDtor &&
          !Opts.ShouldIncludeTemporaryDtorsInCFG)
        return CIP_DisallowedOnce;

      // If we did not find the correct this-region, it would be pointless
      // to inline the constructor. Instead we will simply invalidate
      // the fake temporary target.
      if (CallOpts.IsCtorOrDtorWithImproperlyModeledTargetRegion)
        return CIP_DisallowedOnce;

      // If the temporary is lifetime-extended by binding it to a reference-type
      // field within an aggregate, automatic destructors don't work properly.
      if (CallOpts.IsTemporaryLifetimeExtendedViaAggregate)
        return CIP_DisallowedOnce;
    }

    break;
  }
  case CE_CXXDestructor: {
    if (!Opts.mayInlineCXXMemberFunction(CIMK_Destructors))
      return CIP_DisallowedAlways;

    // Inlining destructors requires building the CFG correctly.
    const AnalysisDeclContext *ADC = CallerSFC->getAnalysisDeclContext();
    assert(ADC->getCFGBuildOptions().AddImplicitDtors && "No CFG destructors");
    (void)ADC;

    // FIXME: We don't handle constructors or destructors for arrays properly.
    if (CallOpts.IsArrayCtorOrDtor)
      return CIP_DisallowedOnce;

    // Allow disabling temporary destructor inlining with a separate option.
    if (CallOpts.IsTemporaryCtorOrDtor &&
        !Opts.MayInlineCXXTemporaryDtors)
      return CIP_DisallowedOnce;

    // If we did not find the correct this-region, it would be pointless
    // to inline the destructor. Instead we will simply invalidate
    // the fake temporary target.
    if (CallOpts.IsCtorOrDtorWithImproperlyModeledTargetRegion)
      return CIP_DisallowedOnce;
    break;
  }
  case CE_CXXAllocator:
    if (Opts.MayInlineCXXAllocator)
      break;
    // Do not inline allocators until we model deallocators.
    // This is unfortunate, but basically necessary for smart pointers and such.
    return CIP_DisallowedAlways;
  case CE_ObjCMessage:
    if (!Opts.MayInlineObjCMethod)
      return CIP_DisallowedAlways;
    if (!(Opts.getIPAMode() == IPAK_DynamicDispatch ||
          Opts.getIPAMode() == IPAK_DynamicDispatchBifurcate))
      return CIP_DisallowedAlways;
    break;
  }

  return CIP_Allowed;
}

/// Returns true if the given C++ class contains a member with the given name.
static bool hasMember(const ASTContext &Ctx, const CXXRecordDecl *RD,
                      StringRef Name) {
  const IdentifierInfo &II = Ctx.Idents.get(Name);
  DeclarationName DeclName = Ctx.DeclarationNames.getIdentifier(&II);
  if (!RD->lookup(DeclName).empty())
    return true;

  CXXBasePaths Paths(false, false, false);
  if (RD->lookupInBases(
          [DeclName](const CXXBaseSpecifier *Specifier, CXXBasePath &Path) {
            return CXXRecordDecl::FindOrdinaryMember(Specifier, Path, DeclName);
          },
          Paths))
    return true;

  return false;
}

/// Returns true if the given C++ class is a container or iterator.
///
/// Our heuristic for this is whether it contains a method named 'begin()' or a
/// nested type named 'iterator' or 'iterator_category'.
static bool isContainerClass(const ASTContext &Ctx, const CXXRecordDecl *RD) {
  return hasMember(Ctx, RD, "begin") ||
         hasMember(Ctx, RD, "iterator") ||
         hasMember(Ctx, RD, "iterator_category");
}

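// An illustrative example (hypothetical class, for exposition): the heuristic
// above classifies this class as a container because it declares begin() and
// a nested 'iterator' type, even though it is not part of the STL:
//
//   class RingBuffer {
//   public:
//     class iterator { /* ... */ };
//     iterator begin();
//     iterator end();
//   };
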
/// Returns true if the given function refers to a method of a C++ container
/// or iterator.
///
/// We generally do a poor job modeling most containers right now, and might
/// prefer not to inline their methods.
static bool isContainerMethod(const ASTContext &Ctx,
                              const FunctionDecl *FD) {
  if (const CXXMethodDecl *MD = dyn_cast<CXXMethodDecl>(FD))
    return isContainerClass(Ctx, MD->getParent());
  return false;
}

/// Returns true if the given function is the destructor of a class named
/// "shared_ptr".
static bool isCXXSharedPtrDtor(const FunctionDecl *FD) {
  const CXXDestructorDecl *Dtor = dyn_cast<CXXDestructorDecl>(FD);
  if (!Dtor)
    return false;

  const CXXRecordDecl *RD = Dtor->getParent();
  if (const IdentifierInfo *II = RD->getDeclName().getAsIdentifierInfo())
    if (II->isStr("shared_ptr"))
        return true;

  return false;
}

/// Returns true if the function in \p CalleeADC may be inlined in general.
///
/// This checks static properties of the function, such as its signature and
/// CFG, to determine whether the analyzer should ever consider inlining it,
/// in any context.
bool ExprEngine::mayInlineDecl(AnalysisDeclContext *CalleeADC) const {
  AnalyzerOptions &Opts = AMgr.getAnalyzerOptions();
  // FIXME: Do not inline variadic calls.
  if (CallEvent::isVariadic(CalleeADC->getDecl()))
    return false;

  // Check certain C++-related inlining policies.
  ASTContext &Ctx = CalleeADC->getASTContext();
  if (Ctx.getLangOpts().CPlusPlus) {
    if (const FunctionDecl *FD = dyn_cast<FunctionDecl>(CalleeADC->getDecl())) {
      // Conditionally control the inlining of template functions.
      if (!Opts.MayInlineTemplateFunctions)
        if (FD->getTemplatedKind() != FunctionDecl::TK_NonTemplate)
          return false;

      // Conditionally control the inlining of C++ standard library functions.
      if (!Opts.MayInlineCXXStandardLibrary)
        if (Ctx.getSourceManager().isInSystemHeader(FD->getLocation()))
          if (AnalysisDeclContext::isInStdNamespace(FD))
            return false;

      // Conditionally control the inlining of methods on objects that look
      // like C++ containers.
      if (!Opts.MayInlineCXXContainerMethods)
        if (!AMgr.isInCodeFile(FD->getLocation()))
          if (isContainerMethod(Ctx, FD))
            return false;

      // Conditionally control the inlining of the destructor of C++ shared_ptr.
      // We don't currently do a good job modeling shared_ptr because we can't
      // see the reference count, so treating it as opaque is probably the best
      // idea.
      if (!Opts.MayInlineCXXSharedPtrDtor)
        if (isCXXSharedPtrDtor(FD))
          return false;
    }
  }

  // It is possible that the CFG cannot be constructed.
  // Be safe, and check if the CalleeCFG is valid.
  const CFG *CalleeCFG = CalleeADC->getCFG();
  if (!CalleeCFG)
    return false;

  // Do not inline large functions.
  if (isHuge(CalleeADC))
    return false;

  // It is possible that the live variables analysis cannot be
  // run.  If so, bail out.
  if (!CalleeADC->getAnalysis<RelaxedLiveVariables>())
    return false;

  return true;
}

bool ExprEngine::shouldInlineCall(const CallEvent &Call, const Decl *D,
                                  const ExplodedNode *Pred,
                                  const EvalCallOptions &CallOpts) {
  if (!D)
    return false;

  AnalysisManager &AMgr = getAnalysisManager();
  AnalyzerOptions &Opts = AMgr.options;
  AnalysisDeclContextManager &ADCMgr = AMgr.getAnalysisDeclContextManager();
  AnalysisDeclContext *CalleeADC = ADCMgr.getContext(D);

  // The auto-synthesized bodies are essential to inline as they are
  // usually small and commonly used. Note: we should do this check early on to
  // ensure we always inline these calls.
  if (CalleeADC->isBodyAutosynthesized())
    return true;

  if (!AMgr.shouldInlineCall())
    return false;

  // Check if this function has been marked as non-inlinable.
  Optional<bool> MayInline = Engine.FunctionSummaries->mayInline(D);
  if (MayInline.hasValue()) {
    if (!MayInline.getValue())
      return false;

  } else {
    // We haven't actually checked the static properties of this function yet.
    // Do that now, and record our decision in the function summaries.
    if (mayInlineDecl(CalleeADC)) {
      Engine.FunctionSummaries->markMayInline(D);
    } else {
      Engine.FunctionSummaries->markShouldNotInline(D);
      return false;
    }
  }

  // Check if we should inline a call based on its kind.
  // FIXME: this checks both static and dynamic properties of the call, which
  // means we're redoing a bit of work that could be cached in the function
  // summary.
  CallInlinePolicy CIP = mayInlineCallKind(Call, Pred, Opts, CallOpts);
  if (CIP != CIP_Allowed) {
    if (CIP == CIP_DisallowedAlways) {
      assert(!MayInline.hasValue() || MayInline.getValue());
      Engine.FunctionSummaries->markShouldNotInline(D);
    }
    return false;
  }

  // Do not inline if recursive or we've reached max stack frame count.
  bool IsRecursive = false;
  unsigned StackDepth = 0;
  examineStackFrames(D, Pred->getLocationContext(), IsRecursive, StackDepth);
  if ((StackDepth >= Opts.InlineMaxStackDepth) &&
      (!isSmall(CalleeADC) || IsRecursive))
    return false;

  // Do not inline large functions too many times.
  if ((Engine.FunctionSummaries->getNumTimesInlined(D) >
       Opts.MaxTimesInlineLarge) &&
      isLarge(CalleeADC)) {
    NumReachedInlineCountMax++;
    return false;
  }

  if (HowToInline == Inline_Minimal && (!isSmall(CalleeADC) || IsRecursive))
    return false;

  return true;
}

static bool isTrivialObjectAssignment(const CallEvent &Call) {
  const CXXInstanceCall *ICall = dyn_cast<CXXInstanceCall>(&Call);
  if (!ICall)
    return false;

  const CXXMethodDecl *MD = dyn_cast_or_null<CXXMethodDecl>(ICall->getDecl());
  if (!MD)
    return false;
  if (!(MD->isCopyAssignmentOperator() || MD->isMoveAssignmentOperator()))
    return false;

  return MD->isTrivial();
}

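// An illustrative example (hypothetical types, for exposition): 'Plain' has a
// trivial, memcpy-like copy assignment, so defaultEvalCall() below models it
// with performTrivialCopy() instead of inlining; 'Tracked' has a user-provided
// operator=, so its assignment goes through ordinary call evaluation:
//
//   struct Plain   { int x; };                                // trivial
//   struct Tracked { Tracked &operator=(const Tracked &); };  // not trivial
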
void ExprEngine::defaultEvalCall(NodeBuilder &Bldr, ExplodedNode *Pred,
                                 const CallEvent &CallTemplate,
                                 const EvalCallOptions &CallOpts) {
  // Make sure we have the most recent state attached to the call.
  ProgramStateRef State = Pred->getState();
  CallEventRef<> Call = CallTemplate.cloneWithState(State);

  // Special-case trivial assignment operators.
  if (isTrivialObjectAssignment(*Call)) {
    performTrivialCopy(Bldr, Pred, *Call);
    return;
  }

  // Try to inline the call.
  // The origin expression here is just used as a kind of checksum;
  // this should still be safe even for CallEvents that don't come from exprs.
  const Expr *E = Call->getOriginExpr();

  ProgramStateRef InlinedFailedState = getInlineFailedState(State, E);
  if (InlinedFailedState) {
    // If we already tried once and failed, make sure we don't retry later.
    State = InlinedFailedState;
  } else {
    RuntimeDefinition RD = Call->getRuntimeDefinition();
    const Decl *D = RD.getDecl();
    if (shouldInlineCall(*Call, D, Pred, CallOpts)) {
      if (RD.mayHaveOtherDefinitions()) {
        AnalyzerOptions &Options = getAnalysisManager().options;

        // Explore with and without inlining the call.
        if (Options.getIPAMode() == IPAK_DynamicDispatchBifurcate) {
          BifurcateCall(RD.getDispatchRegion(), *Call, D, Bldr, Pred);
          return;
        }

        // Don't inline if we're not in any dynamic dispatch mode.
        if (Options.getIPAMode() != IPAK_DynamicDispatch) {
          conservativeEvalCall(*Call, Bldr, Pred, State);
          return;
        }
      }

      // We are not bifurcating and we do have a Decl, so just inline.
      if (inlineCall(*Call, D, Bldr, Pred, State))
        return;
    }
  }

  // If we can't inline it, handle the return value and invalidate the regions.
  conservativeEvalCall(*Call, Bldr, Pred, State);
}

void ExprEngine::BifurcateCall(const MemRegion *BifurReg,
                               const CallEvent &Call, const Decl *D,
                               NodeBuilder &Bldr, ExplodedNode *Pred) {
  assert(BifurReg);
  BifurReg = BifurReg->StripCasts();

  // Check if we've performed the split already - note, we only want
  // to split the path once per memory region.
  ProgramStateRef State = Pred->getState();
  const unsigned *BState =
                        State->get<DynamicDispatchBifurcationMap>(BifurReg);
  if (BState) {
    // If we are on the "inline path", keep inlining if possible.
    if (*BState == DynamicDispatchModeInlined)
      if (inlineCall(Call, D, Bldr, Pred, State))
        return;
    // If inlining failed, or we are on the path where we assume we
    // don't have enough info about the receiver to inline, conjure the
    // return value and invalidate the regions.
    conservativeEvalCall(Call, Bldr, Pred, State);
    return;
  }

  // If we got here, this is the first time we process a message to this
  // region, so split the path.
  ProgramStateRef IState =
      State->set<DynamicDispatchBifurcationMap>(BifurReg,
                                               DynamicDispatchModeInlined);
  inlineCall(Call, D, Bldr, Pred, IState);

  ProgramStateRef NoIState =
      State->set<DynamicDispatchBifurcationMap>(BifurReg,
                                               DynamicDispatchModeConservative);
  conservativeEvalCall(Call, Bldr, Pred, NoIState);

  NumOfDynamicDispatchPathSplits++;
}

void ExprEngine::VisitReturnStmt(const ReturnStmt *RS, ExplodedNode *Pred,
                                 ExplodedNodeSet &Dst) {
  ExplodedNodeSet dstPreVisit;
  getCheckerManager().runCheckersForPreStmt(dstPreVisit, Pred, RS, *this);

  StmtNodeBuilder B(dstPreVisit, Dst, *currBldrCtx);

  if (RS->getRetValue()) {
    for (ExplodedNodeSet::iterator it = dstPreVisit.begin(),
                                  ei = dstPreVisit.end(); it != ei; ++it) {
      B.generateNode(RS, *it, (*it)->getState());
    }
  }
}