Coverage Report

Created: 2020-02-25 14:32

/Users/buildslave/jenkins/workspace/coverage/llvm-project/clang/lib/StaticAnalyzer/Core/ExprEngineCallAndReturn.cpp

//=-- ExprEngineCallAndReturn.cpp - Support for call/return -----*- C++ -*-===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
//  This file defines ExprEngine's support for calls and returns.
//
//===----------------------------------------------------------------------===//

#include "clang/AST/Decl.h"
#include "clang/StaticAnalyzer/Core/PathSensitive/ExprEngine.h"
#include "PrettyStackTraceLocationContext.h"
#include "clang/AST/CXXInheritance.h"
#include "clang/AST/DeclCXX.h"
#include "clang/Analysis/Analyses/LiveVariables.h"
#include "clang/Analysis/ConstructionContext.h"
#include "clang/StaticAnalyzer/Core/CheckerManager.h"
#include "clang/StaticAnalyzer/Core/PathSensitive/CallEvent.h"
#include "llvm/ADT/SmallSet.h"
#include "llvm/ADT/Statistic.h"
#include "llvm/Support/SaveAndRestore.h"

using namespace clang;
using namespace ento;

#define DEBUG_TYPE "ExprEngine"

STATISTIC(NumOfDynamicDispatchPathSplits,
  "The # of times we split the path due to imprecise dynamic dispatch info");

STATISTIC(NumInlinedCalls,
  "The # of times we inlined a call");

STATISTIC(NumReachedInlineCountMax,
  "The # of times we reached inline count maximum");

void ExprEngine::processCallEnter(NodeBuilderContext& BC, CallEnter CE,
                                  ExplodedNode *Pred) {
  // Get the entry block in the CFG of the callee.
  const StackFrameContext *calleeCtx = CE.getCalleeContext();
  PrettyStackTraceLocationContext CrashInfo(calleeCtx);
  const CFGBlock *Entry = CE.getEntry();

  // Validate the CFG.
  assert(Entry->empty());
  assert(Entry->succ_size() == 1);

  // Get the solitary successor.
  const CFGBlock *Succ = *(Entry->succ_begin());

  // Construct an edge representing the starting location in the callee.
  BlockEdge Loc(Entry, Succ, calleeCtx);

  ProgramStateRef state = Pred->getState();

  // Construct a new node, notify checkers that analysis of the function has
  // begun, and add the resultant nodes to the worklist.
  bool isNew;
  ExplodedNode *Node = G.getNode(Loc, state, false, &isNew);
  Node->addPredecessor(Pred, G);
  if (isNew) {
    ExplodedNodeSet DstBegin;
    processBeginOfFunction(BC, Node, DstBegin, Loc);
    Engine.enqueue(DstBegin);
  }
}

// Find the last statement on the path to the exploded node and the
// corresponding Block.
static std::pair<const Stmt*,
                 const CFGBlock*> getLastStmt(const ExplodedNode *Node) {
  const Stmt *S = nullptr;
  const CFGBlock *Blk = nullptr;
  const StackFrameContext *SF = Node->getStackFrame();

  // Back up through the ExplodedGraph until we reach a statement node in this
  // stack frame.
  while (Node) {
    const ProgramPoint &PP = Node->getLocation();

    if (PP.getStackFrame() == SF) {
      if (Optional<StmtPoint> SP = PP.getAs<StmtPoint>()) {
        S = SP->getStmt();
        break;
      } else if (Optional<CallExitEnd> CEE = PP.getAs<CallExitEnd>()) {
        S = CEE->getCalleeContext()->getCallSite();
        if (S)
          break;

        // If there is no statement, this is an implicitly-generated call.
        // We'll walk backwards over it and then continue the loop to find
        // an actual statement.
        Optional<CallEnter> CE;
        do {
          Node = Node->getFirstPred();
          CE = Node->getLocationAs<CallEnter>();
        } while (!CE || CE->getCalleeContext() != CEE->getCalleeContext());

        // Continue searching the graph.
      } else if (Optional<BlockEdge> BE = PP.getAs<BlockEdge>()) {
        Blk = BE->getSrc();
      }
    } else if (Optional<CallEnter> CE = PP.getAs<CallEnter>()) {
      // If we reached the CallEnter for this function, it has no statements.
      if (CE->getCalleeContext() == SF)
        break;
    }

    if (Node->pred_empty())
      return std::make_pair(nullptr, nullptr);

    Node = *Node->pred_begin();
  }

  return std::make_pair(S, Blk);
}
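
// For illustration, a sketch of when the CallExitEnd branch above sees a null
// call site: implicitly-generated calls, such as an automatic destructor run
// at the end of a scope, have no corresponding statement in the source.
// 'HasDtor' here is a hypothetical class, not something from this file.
//
// \code
//   void f() {
//     HasDtor x;   // hypothetical class with a user-written destructor
//   }              // ~HasDtor() runs here with no call-site statement, so
//                  // getLastStmt() walks back to the matching CallEnter.
// \endcode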

/// Adjusts a return value when the called function's return type does not
/// match the caller's expression type. This can happen when a dynamic call
/// is devirtualized, and the overriding method has a covariant (more specific)
/// return type than the parent's method. For C++ objects, this means we need
/// to add base casts.
static SVal adjustReturnValue(SVal V, QualType ExpectedTy, QualType ActualTy,
                              StoreManager &StoreMgr) {
  // For now, the only adjustments we handle apply only to locations.
  if (!V.getAs<Loc>())
    return V;

  // If the types already match, don't do any unnecessary work.
  ExpectedTy = ExpectedTy.getCanonicalType();
  ActualTy = ActualTy.getCanonicalType();
  if (ExpectedTy == ActualTy)
    return V;

  // No adjustment is needed between Objective-C pointer types.
  if (ExpectedTy->isObjCObjectPointerType() &&
      ActualTy->isObjCObjectPointerType())
    return V;

  // C++ object pointers may need "derived-to-base" casts.
  const CXXRecordDecl *ExpectedClass = ExpectedTy->getPointeeCXXRecordDecl();
  const CXXRecordDecl *ActualClass = ActualTy->getPointeeCXXRecordDecl();
  if (ExpectedClass && ActualClass) {
    CXXBasePaths Paths(/*FindAmbiguities=*/true, /*RecordPaths=*/true,
                       /*DetectVirtual=*/false);
    if (ActualClass->isDerivedFrom(ExpectedClass, Paths) &&
        !Paths.isAmbiguous(ActualTy->getCanonicalTypeUnqualified())) {
      return StoreMgr.evalDerivedToBase(V, Paths.front());
    }
  }

  // Unfortunately, Objective-C does not enforce that overridden methods have
  // covariant return types, so we can't assert that that never happens.
  // Be safe and return UnknownVal().
  return UnknownVal();
}
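
// For illustration, a hypothetical covariant override (Base/Derived are not
// from this file) that would require the derived-to-base adjustment above
// when a dynamic call is devirtualized:
//
// \code
//   struct Base    { virtual Base *clone(); };
//   struct Derived : Base { Derived *clone() override; }; // covariant return
//
//   Base *b = getDerived();   // static type Base*, dynamic type Derived
//   Base *c = b->clone();     // inlined Derived::clone() returns Derived*;
//                             // adjustReturnValue() casts it back to Base*.
// \endcode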

void ExprEngine::removeDeadOnEndOfFunction(NodeBuilderContext& BC,
                                           ExplodedNode *Pred,
                                           ExplodedNodeSet &Dst) {
  // Find the last statement in the function and the corresponding basic block.
  const Stmt *LastSt = nullptr;
  const CFGBlock *Blk = nullptr;
  std::tie(LastSt, Blk) = getLastStmt(Pred);
  if (!Blk || !LastSt) {
    Dst.Add(Pred);
    return;
  }

  // Here, we destroy the current location context. We use the current
  // function's entire body as a diagnostic statement, with which the program
  // point will be associated. However, we only want to use LastSt as a
  // reference for what to clean up if it's a ReturnStmt; otherwise, everything
  // is dead.
  SaveAndRestore<const NodeBuilderContext *> NodeContextRAII(currBldrCtx, &BC);
  const LocationContext *LCtx = Pred->getLocationContext();
  removeDead(Pred, Dst, dyn_cast<ReturnStmt>(LastSt), LCtx,
             LCtx->getAnalysisDeclContext()->getBody(),
             ProgramPoint::PostStmtPurgeDeadSymbolsKind);
}

static bool wasDifferentDeclUsedForInlining(CallEventRef<> Call,
    const StackFrameContext *calleeCtx) {
  const Decl *RuntimeCallee = calleeCtx->getDecl();
  const Decl *StaticDecl = Call->getDecl();
  assert(RuntimeCallee);
  if (!StaticDecl)
    return true;
  return RuntimeCallee->getCanonicalDecl() != StaticDecl->getCanonicalDecl();
}

/// The call exit is simulated with a sequence of nodes, which occur between
/// CallExitBegin and CallExitEnd. The following operations occur between the
/// two program points:
/// 1. CallExitBegin (triggers the start of the call exit sequence)
/// 2. Bind the return value
/// 3. Run remove-dead-bindings to clean up the dead symbols from the callee.
/// 4. CallExitEnd (switch to the caller context)
/// 5. PostStmt<CallExpr>
void ExprEngine::processCallExit(ExplodedNode *CEBNode) {
  // Step 1: CEBNode was generated before the call.
  PrettyStackTraceLocationContext CrashInfo(CEBNode->getLocationContext());
  const StackFrameContext *calleeCtx = CEBNode->getStackFrame();

  // The parent context might not be a stack frame, so make sure we
  // look up the first enclosing stack frame.
  const StackFrameContext *callerCtx =
    calleeCtx->getParent()->getStackFrame();

  const Stmt *CE = calleeCtx->getCallSite();
  ProgramStateRef state = CEBNode->getState();
  // Find the last statement in the function and the corresponding basic block.
  const Stmt *LastSt = nullptr;
  const CFGBlock *Blk = nullptr;
  std::tie(LastSt, Blk) = getLastStmt(CEBNode);

  // Generate a CallEvent /before/ cleaning the state, so that we can get the
  // correct value for 'this' (if necessary).
  CallEventManager &CEMgr = getStateManager().getCallEventManager();
  CallEventRef<> Call = CEMgr.getCaller(calleeCtx, state);

  // Step 2: generate node with bound return value: CEBNode -> BindedRetNode.

  // If the callee returns an expression, bind its value to CallExpr.
  if (CE) {
    if (const ReturnStmt *RS = dyn_cast_or_null<ReturnStmt>(LastSt)) {
      const LocationContext *LCtx = CEBNode->getLocationContext();
      SVal V = state->getSVal(RS, LCtx);

      // Ensure that the return type matches the type of the returned Expr.
      if (wasDifferentDeclUsedForInlining(Call, calleeCtx)) {
        QualType ReturnedTy =
          CallEvent::getDeclaredResultType(calleeCtx->getDecl());
        if (!ReturnedTy.isNull()) {
          if (const Expr *Ex = dyn_cast<Expr>(CE)) {
            V = adjustReturnValue(V, Ex->getType(), ReturnedTy,
                                  getStoreManager());
          }
        }
      }

      state = state->BindExpr(CE, callerCtx, V);
    }

    // Bind the constructed object value to CXXConstructExpr.
    if (const CXXConstructExpr *CCE = dyn_cast<CXXConstructExpr>(CE)) {
      loc::MemRegionVal This =
        svalBuilder.getCXXThis(CCE->getConstructor()->getParent(), calleeCtx);
      SVal ThisV = state->getSVal(This);
      ThisV = state->getSVal(ThisV.castAs<Loc>());
      state = state->BindExpr(CCE, callerCtx, ThisV);
    }

    if (const auto *CNE = dyn_cast<CXXNewExpr>(CE)) {
      // We are currently evaluating a CXXNewAllocator CFGElement. It takes a
      // while to reach the actual CXXNewExpr element from here, so keep the
      // region for later use.
      // Additionally cast the return value of the inlined operator new
      // (which is of type 'void *') to the correct object type.
      SVal AllocV = state->getSVal(CNE, callerCtx);
      AllocV = svalBuilder.evalCast(
          AllocV, CNE->getType(),
          getContext().getPointerType(getContext().VoidTy));

      state = addObjectUnderConstruction(state, CNE, calleeCtx->getParent(),
                                         AllocV);
    }
  }

  // Step 3: BindedRetNode -> CleanedNodes
  // If we can find a statement and a block in the inlined function, run remove
  // dead bindings before returning from the call. This is important to ensure
  // that we report issues such as leaks in the stack contexts in which
  // they occurred.
  ExplodedNodeSet CleanedNodes;
  if (LastSt && Blk && AMgr.options.AnalysisPurgeOpt != PurgeNone) {
    static SimpleProgramPointTag retValBind("ExprEngine", "Bind Return Value");
    PostStmt Loc(LastSt, calleeCtx, &retValBind);
    bool isNew;
    ExplodedNode *BindedRetNode = G.getNode(Loc, state, false, &isNew);
    BindedRetNode->addPredecessor(CEBNode, G);
    if (!isNew)
      return;

    NodeBuilderContext Ctx(getCoreEngine(), Blk, BindedRetNode);
    currBldrCtx = &Ctx;
    // Here, we call the Symbol Reaper with 0 statement and callee location
    // context, telling it to clean up everything in the callee's context
    // (and its children). We use the callee's function body as a diagnostic
    // statement, with which the program point will be associated.
    removeDead(BindedRetNode, CleanedNodes, nullptr, calleeCtx,
               calleeCtx->getAnalysisDeclContext()->getBody(),
               ProgramPoint::PostStmtPurgeDeadSymbolsKind);
    currBldrCtx = nullptr;
  } else {
    CleanedNodes.Add(CEBNode);
  }

  for (ExplodedNodeSet::iterator I = CleanedNodes.begin(),
                                 E = CleanedNodes.end(); I != E; ++I) {

    // Step 4: Generate the CallExit and leave the callee's context.
    // CleanedNodes -> CEENode
    CallExitEnd Loc(calleeCtx, callerCtx);
    bool isNew;
    ProgramStateRef CEEState = (*I == CEBNode) ? state : (*I)->getState();

    ExplodedNode *CEENode = G.getNode(Loc, CEEState, false, &isNew);
    CEENode->addPredecessor(*I, G);
    if (!isNew)
      return;

    // Step 5: Perform the post-condition check of the CallExpr and enqueue the
    // result onto the work list.
    // CEENode -> Dst -> WorkList
    NodeBuilderContext Ctx(Engine, calleeCtx->getCallSiteBlock(), CEENode);
    SaveAndRestore<const NodeBuilderContext*> NBCSave(currBldrCtx,
        &Ctx);
    SaveAndRestore<unsigned> CBISave(currStmtIdx, calleeCtx->getIndex());

    CallEventRef<> UpdatedCall = Call.cloneWithState(CEEState);

    ExplodedNodeSet DstPostCall;
    if (const CXXNewExpr *CNE = dyn_cast_or_null<CXXNewExpr>(CE)) {
      ExplodedNodeSet DstPostPostCallCallback;
      getCheckerManager().runCheckersForPostCall(DstPostPostCallCallback,
                                                 CEENode, *UpdatedCall, *this,
                                                 /*wasInlined=*/true);
      for (auto I : DstPostPostCallCallback) {
        getCheckerManager().runCheckersForNewAllocator(
            CNE,
            *getObjectUnderConstruction(I->getState(), CNE,
                                        calleeCtx->getParent()),
            DstPostCall, I, *this,
            /*wasInlined=*/true);
      }
    } else {
      getCheckerManager().runCheckersForPostCall(DstPostCall, CEENode,
                                                 *UpdatedCall, *this,
                                                 /*wasInlined=*/true);
    }
    ExplodedNodeSet Dst;
    if (const ObjCMethodCall *Msg = dyn_cast<ObjCMethodCall>(Call)) {
      getCheckerManager().runCheckersForPostObjCMessage(Dst, DstPostCall, *Msg,
                                                        *this,
                                                        /*wasInlined=*/true);
    } else if (CE &&
               !(isa<CXXNewExpr>(CE) && // Called when visiting CXXNewExpr.
                 AMgr.getAnalyzerOptions().MayInlineCXXAllocator)) {
      getCheckerManager().runCheckersForPostStmt(Dst, DstPostCall, CE,
                                                 *this, /*wasInlined=*/true);
    } else {
      Dst.insert(DstPostCall);
    }

    // Enqueue the next element in the block.
    for (ExplodedNodeSet::iterator PSI = Dst.begin(), PSE = Dst.end();
                                   PSI != PSE; ++PSI) {
      Engine.getWorkList()->enqueue(*PSI, calleeCtx->getCallSiteBlock(),
                                    calleeCtx->getIndex()+1);
    }
  }
}
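
// For illustration, the node sequence built above for a simple inlined call,
// assuming a hypothetical caller statement 'int x = foo();':
//
// \code
//   CallExitBegin             // CEBNode: still in foo()'s stack frame
//   PostStmt (retValBind tag) // BindedRetNode: foo()'s return value bound
//   PostStmtPurgeDeadSymbols  // CleanedNodes: callee bindings removed
//   CallExitEnd               // CEENode: back in the caller's stack frame
//   PostStmt<CallExpr>        // post-call checker callbacks, then worklist
// \endcode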

bool ExprEngine::isSmall(AnalysisDeclContext *ADC) const {
  // When there are no branches in the function, it means that there's no
  // exponential complexity introduced by inlining such a function.
  // Such functions also don't trigger various fundamental problems
  // with our inlining mechanism, such as the problem of
  // inlined defensive checks. Hence isLinear().
  const CFG *Cfg = ADC->getCFG();
  return Cfg->isLinear() || Cfg->size() <= AMgr.options.AlwaysInlineSize;
}

bool ExprEngine::isLarge(AnalysisDeclContext *ADC) const {
  const CFG *Cfg = ADC->getCFG();
  return Cfg->size() >= AMgr.options.MinCFGSizeTreatFunctionsAsLarge;
}

bool ExprEngine::isHuge(AnalysisDeclContext *ADC) const {
  const CFG *Cfg = ADC->getCFG();
  return Cfg->getNumBlockIDs() > AMgr.options.MaxInlinableSize;
}
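
// For illustration: under these predicates a branch-free function is "small"
// no matter how the AlwaysInlineSize option is configured, e.g.
//
// \code
//   int twice(int x) { return 2 * x; }  // linear CFG => isSmall() is true,
//                                       // so examineStackFrames() below does
//                                       // not count it toward the depth.
// \endcode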

void ExprEngine::examineStackFrames(const Decl *D, const LocationContext *LCtx,
                               bool &IsRecursive, unsigned &StackDepth) {
  IsRecursive = false;
  StackDepth = 0;

  while (LCtx) {
    if (const StackFrameContext *SFC = dyn_cast<StackFrameContext>(LCtx)) {
      const Decl *DI = SFC->getDecl();

      // Mark recursive (and mutually recursive) functions and always count
      // them when measuring the stack depth.
      if (DI == D) {
        IsRecursive = true;
        ++StackDepth;
        LCtx = LCtx->getParent();
        continue;
      }

      // Do not count the small functions when determining the stack depth.
      AnalysisDeclContext *CalleeADC = AMgr.getAnalysisDeclContext(DI);
      if (!isSmall(CalleeADC))
        ++StackDepth;
    }
    LCtx = LCtx->getParent();
  }
}

// The GDM component containing the dynamic dispatch bifurcation info. When
// the exact type of the receiver is not known, we want to explore both paths -
// one on which we do inline it and the other one on which we don't. This is
// done to ensure we do not drop coverage.
// This is the map from the receiver region to a bool, specifying whether we
// consider this region's information precise along the given path.
namespace {
  enum DynamicDispatchMode {
    DynamicDispatchModeInlined = 1,
    DynamicDispatchModeConservative
  };
} // end anonymous namespace

REGISTER_MAP_WITH_PROGRAMSTATE(DynamicDispatchBifurcationMap,
                               const MemRegion *, unsigned)
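
// For illustration, a hypothetical message send whose receiver has an
// imprecise dynamic type, which is what triggers the bifurcation recorded in
// this map ('MyBase' and 'frob' are invented names):
//
// \code
//   - (void)use:(MyBase *)obj {
//     [obj frob];   // 'obj' may be MyBase or any subclass; one path inlines
//   }               // -[MyBase frob], the other evaluates it conservatively.
// \endcode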

bool ExprEngine::inlineCall(const CallEvent &Call, const Decl *D,
                            NodeBuilder &Bldr, ExplodedNode *Pred,
                            ProgramStateRef State) {
  assert(D);

  const LocationContext *CurLC = Pred->getLocationContext();
  const StackFrameContext *CallerSFC = CurLC->getStackFrame();
  const LocationContext *ParentOfCallee = CallerSFC;
  if (Call.getKind() == CE_Block &&
      !cast<BlockCall>(Call).isConversionFromLambda()) {
    const BlockDataRegion *BR = cast<BlockCall>(Call).getBlockRegion();
    assert(BR && "If we have the block definition we should have its region");
    AnalysisDeclContext *BlockCtx = AMgr.getAnalysisDeclContext(D);
    ParentOfCallee = BlockCtx->getBlockInvocationContext(CallerSFC,
                                                         cast<BlockDecl>(D),
                                                         BR);
  }

  // This may be NULL, but that's fine.
  const Expr *CallE = Call.getOriginExpr();

  // Construct a new stack frame for the callee.
  AnalysisDeclContext *CalleeADC = AMgr.getAnalysisDeclContext(D);
  const StackFrameContext *CalleeSFC =
      CalleeADC->getStackFrame(ParentOfCallee, CallE, currBldrCtx->getBlock(),
                               currBldrCtx->blockCount(), currStmtIdx);

  CallEnter Loc(CallE, CalleeSFC, CurLC);

  // Construct a new state which contains the mapping from actual to
  // formal arguments.
  State = State->enterStackFrame(Call, CalleeSFC);

  bool isNew;
  if (ExplodedNode *N = G.getNode(Loc, State, false, &isNew)) {
    N->addPredecessor(Pred, G);
    if (isNew)
      Engine.getWorkList()->enqueue(N);
  }

  // If we decided to inline the call, the successor has been manually
  // added onto the work list so remove it from the node builder.
  Bldr.takeNodes(Pred);

  NumInlinedCalls++;
  Engine.FunctionSummaries->bumpNumTimesInlined(D);

  // Mark the decl as visited.
  if (VisitedCallees)
    VisitedCallees->insert(D);

  return true;
}

static ProgramStateRef getInlineFailedState(ProgramStateRef State,
                                            const Stmt *CallE) {
  const void *ReplayState = State->get<ReplayWithoutInlining>();
  if (!ReplayState)
    return nullptr;

  assert(ReplayState == CallE && "Backtracked to the wrong call.");
  (void)CallE;

  return State->remove<ReplayWithoutInlining>();
}

void ExprEngine::VisitCallExpr(const CallExpr *CE, ExplodedNode *Pred,
                               ExplodedNodeSet &dst) {
  // Perform the previsit of the CallExpr.
  ExplodedNodeSet dstPreVisit;
  getCheckerManager().runCheckersForPreStmt(dstPreVisit, Pred, CE, *this);

  // Get the call in its initial state. We use this as a template to perform
  // all the checks.
  CallEventManager &CEMgr = getStateManager().getCallEventManager();
  CallEventRef<> CallTemplate
    = CEMgr.getSimpleCall(CE, Pred->getState(), Pred->getLocationContext());

  // Evaluate the function call.  We try each of the checkers
  // to see if they can evaluate the function call.
  ExplodedNodeSet dstCallEvaluated;
  for (ExplodedNodeSet::iterator I = dstPreVisit.begin(), E = dstPreVisit.end();
       I != E; ++I) {
    evalCall(dstCallEvaluated, *I, *CallTemplate);
  }

  // Finally, perform the post-condition check of the CallExpr and store
  // the created nodes in 'Dst'.
  // Note that if the call was inlined, dstCallEvaluated will be empty.
  // The post-CallExpr check will occur in processCallExit.
  getCheckerManager().runCheckersForPostStmt(dst, dstCallEvaluated, CE,
                                             *this);
}

ProgramStateRef ExprEngine::finishArgumentConstruction(ProgramStateRef State,
                                                       const CallEvent &Call) {
  const Expr *E = Call.getOriginExpr();
  // FIXME: Constructors to placement arguments of operator new
  // are not supported yet.
  if (!E || isa<CXXNewExpr>(E))
    return State;

  const LocationContext *LC = Call.getLocationContext();
  for (unsigned CallI = 0, CallN = Call.getNumArgs(); CallI != CallN; ++CallI) {
    unsigned I = Call.getASTArgumentIndex(CallI);
    if (Optional<SVal> V =
            getObjectUnderConstruction(State, {E, I}, LC)) {
      SVal VV = *V;
      (void)VV;
      assert(cast<VarRegion>(VV.castAs<loc::MemRegionVal>().getRegion())
                 ->getStackFrame()->getParent()
                 ->getStackFrame() == LC->getStackFrame());
      State = finishObjectConstruction(State, {E, I}, LC);
    }
  }

  return State;
}

void ExprEngine::finishArgumentConstruction(ExplodedNodeSet &Dst,
                                            ExplodedNode *Pred,
                                            const CallEvent &Call) {
  ProgramStateRef State = Pred->getState();
  ProgramStateRef CleanedState = finishArgumentConstruction(State, Call);
  if (CleanedState == State) {
    Dst.insert(Pred);
    return;
  }

  const Expr *E = Call.getOriginExpr();
  const LocationContext *LC = Call.getLocationContext();
  NodeBuilder B(Pred, Dst, *currBldrCtx);
  static SimpleProgramPointTag Tag("ExprEngine",
                                   "Finish argument construction");
  PreStmt PP(E, LC, &Tag);
  B.generateNode(PP, CleanedState, Pred);
}

void ExprEngine::evalCall(ExplodedNodeSet &Dst, ExplodedNode *Pred,
                          const CallEvent &Call) {
  // WARNING: At this time, the state attached to 'Call' may be older than the
  // state in 'Pred'. This is a minor optimization since CheckerManager will
  // use an updated CallEvent instance when calling checkers, but if 'Call' is
  // ever used directly in this function all callers should be updated to pass
  // the most recent state. (It is probably not worth doing the work here since
  // for some callers this will not be necessary.)

  // Run any pre-call checks using the generic call interface.
  ExplodedNodeSet dstPreVisit;
  getCheckerManager().runCheckersForPreCall(dstPreVisit, Pred,
                                            Call, *this);

  // Actually evaluate the function call.  We try each of the checkers
  // to see if they can evaluate the function call, and get a callback at
  // defaultEvalCall if all of them fail.
  ExplodedNodeSet dstCallEvaluated;
  getCheckerManager().runCheckersForEvalCall(dstCallEvaluated, dstPreVisit,
                                             Call, *this);

  // If there were other constructors called for object-type arguments
  // of this call, clean them up.
  ExplodedNodeSet dstArgumentCleanup;
  for (auto I : dstCallEvaluated)
    finishArgumentConstruction(dstArgumentCleanup, I, Call);

  ExplodedNodeSet dstPostCall;
  getCheckerManager().runCheckersForPostCall(dstPostCall, dstArgumentCleanup,
                                             Call, *this);

  // Handle symbols that escaped via the regions invalidated above.
  // Note that for inlined calls, the nodes were put back into the worklist,
  // so we can assume that every node belongs to a conservative call at this
  // point.

  // Run the pointerEscape callback with the newly conjured symbols.
  SmallVector<std::pair<SVal, SVal>, 8> Escaped;
  for (auto I : dstPostCall) {
    NodeBuilder B(I, Dst, *currBldrCtx);
    ProgramStateRef State = I->getState();
    Escaped.clear();
    {
      unsigned Arg = -1;
      for (const ParmVarDecl *PVD : Call.parameters()) {
        ++Arg;
        QualType ParamTy = PVD->getType();
        if (ParamTy.isNull() ||
            (!ParamTy->isPointerType() && !ParamTy->isReferenceType()))
          continue;
        QualType Pointee = ParamTy->getPointeeType();
        if (Pointee.isConstQualified() || Pointee->isVoidType())
          continue;
        if (const MemRegion *MR = Call.getArgSVal(Arg).getAsRegion())
          Escaped.emplace_back(loc::MemRegionVal(MR), State->getSVal(MR, Pointee));
      }
    }

    State = processPointerEscapedOnBind(State, Escaped, I->getLocationContext(),
                                        PSK_EscapeOutParameters, &Call);

    if (State == I->getState())
      Dst.insert(I);
    else
      B.generateNode(I->getLocation(), State, I);
  }
}
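
// For illustration, the out-parameter escape handled at the end of evalCall()
// for a conservatively evaluated call, assuming a hypothetical function
// 'init' with no visible definition:
//
// \code
//   void init(int *p);   // body not available, so the call is not inlined
//
//   int v;
//   init(&v);            // invalidation conjures a fresh symbol for 'v'; the
//                        // (region, value) pair is reported to checkers via
//                        // PSK_EscapeOutParameters.
// \endcode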

ProgramStateRef ExprEngine::bindReturnValue(const CallEvent &Call,
                                            const LocationContext *LCtx,
                                            ProgramStateRef State) {
  const Expr *E = Call.getOriginExpr();
  if (!E)
    return State;

  // Some method families have known return values.
  if (const ObjCMethodCall *Msg = dyn_cast<ObjCMethodCall>(&Call)) {
    switch (Msg->getMethodFamily()) {
    default:
      break;
    case OMF_autorelease:
    case OMF_retain:
    case OMF_self: {
      // These methods return their receivers.
      return State->BindExpr(E, LCtx, Msg->getReceiverSVal());
    }
    }
  } else if (const CXXConstructorCall *C = dyn_cast<CXXConstructorCall>(&Call)){
    SVal ThisV = C->getCXXThisVal();
    ThisV = State->getSVal(ThisV.castAs<Loc>());
    return State->BindExpr(E, LCtx, ThisV);
  }

  SVal R;
  QualType ResultTy = Call.getResultType();
  unsigned Count = currBldrCtx->blockCount();
  if (auto RTC = getCurrentCFGElement().getAs<CFGCXXRecordTypedCall>()) {
    // Conjure a temporary if the function returns an object by value.
    SVal Target;
    assert(RTC->getStmt() == Call.getOriginExpr());
    EvalCallOptions CallOpts; // FIXME: We won't really need those.
    std::tie(State, Target) =
        handleConstructionContext(Call.getOriginExpr(), State, LCtx,
                                  RTC->getConstructionContext(), CallOpts);
    const MemRegion *TargetR = Target.getAsRegion();
    assert(TargetR);
    // Invalidate the region so that it doesn't look uninitialized. If this is
    // a field or element constructor, we do not want to invalidate
    // the whole structure. Pointer escape is meaningless because
    // the structure is a product of conservative evaluation
    // and therefore contains nothing interesting at this point.
    RegionAndSymbolInvalidationTraits ITraits;
    ITraits.setTrait(TargetR,
        RegionAndSymbolInvalidationTraits::TK_DoNotInvalidateSuperRegion);
    State = State->invalidateRegions(TargetR, E, Count, LCtx,
                                     /* CausesPointerEscape=*/false, nullptr,
                                     &Call, &ITraits);

    R = State->getSVal(Target.castAs<Loc>(), E->getType());
  } else {
    // Conjure a symbol if the return value is unknown.

    // See if we need to conjure a heap pointer instead of
    // a regular unknown pointer.
    bool IsHeapPointer = false;
    if (const auto *CNE = dyn_cast<CXXNewExpr>(E))
      if (CNE->getOperatorNew()->isReplaceableGlobalAllocationFunction()) {
        // FIXME: Delegate this to evalCall in MallocChecker?
        IsHeapPointer = true;
      }

    R = IsHeapPointer ? svalBuilder.getConjuredHeapSymbolVal(E, LCtx, Count)
                      : svalBuilder.conjureSymbolVal(nullptr, E, LCtx, ResultTy,
                                                     Count);
  }
  return State->BindExpr(E, LCtx, R);
}
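
// For illustration, the special-cased Objective-C method families above bind
// the receiver itself rather than a conjured symbol, e.g. in a hypothetical
// snippet:
//
// \code
//   id obj = getObject();
//   id same = [obj self];     // bound to the receiver's SVal (OMF_self)
//   id kept = [obj retain];   // likewise for OMF_retain / OMF_autorelease
// \endcode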

// Conservatively evaluate call by invalidating regions and binding
// a conjured return value.
void ExprEngine::conservativeEvalCall(const CallEvent &Call, NodeBuilder &Bldr,
                                      ExplodedNode *Pred, ProgramStateRef State) {
  State = Call.invalidateRegions(currBldrCtx->blockCount(), State);
  State = bindReturnValue(Call, Pred->getLocationContext(), State);

  // And make the result node.
  Bldr.generateNode(Call.getProgramPoint(), State, Pred);
}

ExprEngine::CallInlinePolicy
ExprEngine::mayInlineCallKind(const CallEvent &Call, const ExplodedNode *Pred,
                              AnalyzerOptions &Opts,
                              const ExprEngine::EvalCallOptions &CallOpts) {
  const LocationContext *CurLC = Pred->getLocationContext();
  const StackFrameContext *CallerSFC = CurLC->getStackFrame();
  switch (Call.getKind()) {
  case CE_Function:
  case CE_Block:
    break;
  case CE_CXXMember:
  case CE_CXXMemberOperator:
    if (!Opts.mayInlineCXXMemberFunction(CIMK_MemberFunctions))
      return CIP_DisallowedAlways;
    break;
  case CE_CXXConstructor: {
    if (!Opts.mayInlineCXXMemberFunction(CIMK_Constructors))
      return CIP_DisallowedAlways;

    const CXXConstructorCall &Ctor = cast<CXXConstructorCall>(Call);

    const CXXConstructExpr *CtorExpr = Ctor.getOriginExpr();

    auto CCE = getCurrentCFGElement().getAs<CFGConstructor>();
    const ConstructionContext *CC = CCE ? CCE->getConstructionContext()
                                        : nullptr;

    if (CC && isa<NewAllocatedObjectConstructionContext>(CC) &&
        !Opts.MayInlineCXXAllocator)
      return CIP_DisallowedOnce;

    // FIXME: We don't handle constructors or destructors for arrays properly.
    // Even once we do, we still need to be careful about implicitly-generated
    // initializers for array fields in default move/copy constructors.
    // We still allow construction into ElementRegion targets when they don't
    // represent array elements.
    if (CallOpts.IsArrayCtorOrDtor)
      return CIP_DisallowedOnce;

    // Inlining constructors requires including initializers in the CFG.
    const AnalysisDeclContext *ADC = CallerSFC->getAnalysisDeclContext();
    assert(ADC->getCFGBuildOptions().AddInitializers && "No CFG initializers");
    (void)ADC;

    // If the destructor is trivial, it's always safe to inline the constructor.
    if (Ctor.getDecl()->getParent()->hasTrivialDestructor())
      break;

    // For other types, only inline constructors if destructor inlining is
    // also enabled.
    if (!Opts.mayInlineCXXMemberFunction(CIMK_Destructors))
      return CIP_DisallowedAlways;

    if (CtorExpr->getConstructionKind() == CXXConstructExpr::CK_Complete) {
      // If we don't handle temporary destructors, we shouldn't inline
      // their constructors.
      if (CallOpts.IsTemporaryCtorOrDtor &&
          !Opts.ShouldIncludeTemporaryDtorsInCFG)
        return CIP_DisallowedOnce;

      // If we did not find the correct this-region, it would be pointless
      // to inline the constructor. Instead we will simply invalidate
      // the fake temporary target.
      if (CallOpts.IsCtorOrDtorWithImproperlyModeledTargetRegion)
        return CIP_DisallowedOnce;

      // If the temporary is lifetime-extended by binding it to a reference-type
      // field within an aggregate, automatic destructors don't work properly.
      if (CallOpts.IsTemporaryLifetimeExtendedViaAggregate)
        return CIP_DisallowedOnce;
    }

    break;
  }
  case CE_CXXInheritedConstructor: {
    // This doesn't really increase the cost of inlining ever, because
    // the stack frame of the inherited constructor is trivial.
    return CIP_Allowed;
  }
  case CE_CXXDestructor: {
    if (!Opts.mayInlineCXXMemberFunction(CIMK_Destructors))
      return CIP_DisallowedAlways;

    // Inlining destructors requires building the CFG correctly.
    const AnalysisDeclContext *ADC = CallerSFC->getAnalysisDeclContext();
    assert(ADC->getCFGBuildOptions().AddImplicitDtors && "No CFG destructors");
    (void)ADC;

    // FIXME: We don't handle constructors or destructors for arrays properly.
    if (CallOpts.IsArrayCtorOrDtor)
      return CIP_DisallowedOnce;

    // Allow disabling temporary destructor inlining with a separate option.
    if (CallOpts.IsTemporaryCtorOrDtor &&
        !Opts.MayInlineCXXTemporaryDtors)
      return CIP_DisallowedOnce;

    // If we did not find the correct this-region, it would be pointless
    // to inline the destructor. Instead we will simply invalidate
    // the fake temporary target.
    if (CallOpts.IsCtorOrDtorWithImproperlyModeledTargetRegion)
      return CIP_DisallowedOnce;
    break;
  }
  case CE_CXXAllocator:
    if (Opts.MayInlineCXXAllocator)
      break;
    // Do not inline allocators until we model deallocators.
    // This is unfortunate, but basically necessary for smart pointers and such.
    return CIP_DisallowedAlways;
  case CE_ObjCMessage:
    if (!Opts.MayInlineObjCMethod)
      return CIP_DisallowedAlways;
    if (!(Opts.getIPAMode() == IPAK_DynamicDispatch ||
          Opts.getIPAMode() == IPAK_DynamicDispatchBifurcate))
      return CIP_DisallowedAlways;
    break;
  }

  return CIP_Allowed;
}

/// Returns true if the given C++ class contains a member with the given name.
static bool hasMember(const ASTContext &Ctx, const CXXRecordDecl *RD,
                      StringRef Name) {
  const IdentifierInfo &II = Ctx.Idents.get(Name);
  DeclarationName DeclName = Ctx.DeclarationNames.getIdentifier(&II);
  if (!RD->lookup(DeclName).empty())
    return true;

  CXXBasePaths Paths(false, false, false);
  if (RD->lookupInBases(
          [DeclName](const CXXBaseSpecifier *Specifier, CXXBasePath &Path) {
            return CXXRecordDecl::FindOrdinaryMember(Specifier, Path, DeclName);
          },
          Paths))
    return true;

  return false;
}

/// Returns true if the given C++ class is a container or iterator.
///
/// Our heuristic for this is whether it contains a method named 'begin()' or a
/// nested type named 'iterator' or 'iterator_category'.
static bool isContainerClass(const ASTContext &Ctx, const CXXRecordDecl *RD) {
  return hasMember(Ctx, RD, "begin") ||
         hasMember(Ctx, RD, "iterator") ||
         hasMember(Ctx, RD, "iterator_category");
}
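
// For illustration, a hypothetical class that this heuristic classifies as a
// container, because it declares 'begin' and a nested 'iterator' type:
//
// \code
//   class MyVec {
//   public:
//     using iterator = int *;
//     iterator begin();       // hasMember(..., "begin") is true
//   };
// \endcode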

/// Returns true if the given function refers to a method of a C++ container
/// or iterator.
///
/// We generally do a poor job modeling most containers right now, and might
/// prefer not to inline their methods.
static bool isContainerMethod(const ASTContext &Ctx,
                              const FunctionDecl *FD) {
  if (const CXXMethodDecl *MD = dyn_cast<CXXMethodDecl>(FD))
    return isContainerClass(Ctx, MD->getParent());
  return false;
}

/// Returns true if the given function is the destructor of a class named
/// "shared_ptr".
static bool isCXXSharedPtrDtor(const FunctionDecl *FD) {
  const CXXDestructorDecl *Dtor = dyn_cast<CXXDestructorDecl>(FD);
  if (!Dtor)
    return false;

  const CXXRecordDecl *RD = Dtor->getParent();
  if (const IdentifierInfo *II = RD->getDeclName().getAsIdentifierInfo())
    if (II->isStr("shared_ptr"))
        return true;

  return false;
}

/// Returns true if the function in \p CalleeADC may be inlined in general.
///
/// This checks static properties of the function, such as its signature and
/// CFG, to determine whether the analyzer should ever consider inlining it,
/// in any context.
bool ExprEngine::mayInlineDecl(AnalysisDeclContext *CalleeADC) const {
  AnalyzerOptions &Opts = AMgr.getAnalyzerOptions();
  // FIXME: Do not inline variadic calls.
  if (CallEvent::isVariadic(CalleeADC->getDecl()))
    return false;

  // Check certain C++-related inlining policies.
  ASTContext &Ctx = CalleeADC->getASTContext();
  if (Ctx.getLangOpts().CPlusPlus) {
    if (const FunctionDecl *FD = dyn_cast<FunctionDecl>(CalleeADC->getDecl())) {
      // Conditionally control the inlining of template functions.
      if (!Opts.MayInlineTemplateFunctions)
        if (FD->getTemplatedKind() != FunctionDecl::TK_NonTemplate)
          return false;

      // Conditionally control the inlining of C++ standard library functions.
      if (!Opts.MayInlineCXXStandardLibrary)
        if (Ctx.getSourceManager().isInSystemHeader(FD->getLocation()))
          if (AnalysisDeclContext::isInStdNamespace(FD))
            return false;

      // Conditionally control the inlining of methods on objects that look
      // like C++ containers.
      if (!Opts.MayInlineCXXContainerMethods)
        if (!AMgr.isInCodeFile(FD->getLocation()))
          if (isContainerMethod(Ctx, FD))
            return false;

      // Conditionally control the inlining of the destructor of C++ shared_ptr.
      // We don't currently do a good job modeling shared_ptr because we can't
      // see the reference count, so treating it as opaque is probably the best
      // idea.
      if (!Opts.MayInlineCXXSharedPtrDtor)
        if (isCXXSharedPtrDtor(FD))
          return false;
    }
  }

  // It is possible that the CFG cannot be constructed.
  // Be safe, and check if the CalleeCFG is valid.
  const CFG *CalleeCFG = CalleeADC->getCFG();
  if (!CalleeCFG)
    return false;

  // Do not inline large functions.
  if (isHuge(CalleeADC))
    return false;

  // It is possible that the live variables analysis cannot be
  // run.  If so, bail out.
  if (!CalleeADC->getAnalysis<RelaxedLiveVariables>())
    return false;

  return true;
}

bool ExprEngine::shouldInlineCall(const CallEvent &Call, const Decl *D,
                                  const ExplodedNode *Pred,
                                  const EvalCallOptions &CallOpts) {
  if (!D)
    return false;

  AnalysisManager &AMgr = getAnalysisManager();
  AnalyzerOptions &Opts = AMgr.options;
  AnalysisDeclContextManager &ADCMgr = AMgr.getAnalysisDeclContextManager();
  AnalysisDeclContext *CalleeADC = ADCMgr.getContext(D);

  // The auto-synthesized bodies are essential to inline as they are
  // usually small and commonly used. Note: we should do this check early on to
  // ensure we always inline these calls.
  if (CalleeADC->isBodyAutosynthesized())
    return true;

  if (!AMgr.shouldInlineCall())
    return false;

  // Check if this function has been marked as non-inlinable.
  Optional<bool> MayInline = Engine.FunctionSummaries->mayInline(D);
  if (MayInline.hasValue()) {
    if (!MayInline.getValue())
      return false;

  } else {
    // We haven't actually checked the static properties of this function yet.
    // Do that now, and record our decision in the function summaries.
    if (mayInlineDecl(CalleeADC)) {
      Engine.FunctionSummaries->markMayInline(D);
    } else {
      Engine.FunctionSummaries->markShouldNotInline(D);
      return false;
    }
  }

  // Check if we should inline a call based on its kind.
  // FIXME: this checks both static and dynamic properties of the call, which
  // means we're redoing a bit of work that could be cached in the function
  // summary.
  CallInlinePolicy CIP = mayInlineCallKind(Call, Pred, Opts, CallOpts);
  if (CIP != CIP_Allowed) {
    if (CIP == CIP_DisallowedAlways) {
      assert(!MayInline.hasValue() || MayInline.getValue());
      Engine.FunctionSummaries->markShouldNotInline(D);
    }
    return false;
  }

  // Do not inline if recursive or we've reached max stack frame count.
  bool IsRecursive = false;
  unsigned StackDepth = 0;
  examineStackFrames(D, Pred->getLocationContext(), IsRecursive, StackDepth);
  if ((StackDepth >= Opts.InlineMaxStackDepth) &&
      (!isSmall(CalleeADC) || IsRecursive))
    return false;

  // Do not inline large functions too many times.
  if ((Engine.FunctionSummaries->getNumTimesInlined(D) >
       Opts.MaxTimesInlineLarge) &&
      isLarge(CalleeADC)) {
    NumReachedInlineCountMax++;
    return false;
  }

  if (HowToInline == Inline_Minimal && (!isSmall(CalleeADC) || IsRecursive))
    return false;

  return true;
}

static bool isTrivialObjectAssignment(const CallEvent &Call) {
  const CXXInstanceCall *ICall = dyn_cast<CXXInstanceCall>(&Call);
  if (!ICall)
    return false;

  const CXXMethodDecl *MD = dyn_cast_or_null<CXXMethodDecl>(ICall->getDecl());
  if (!MD)
    return false;
  if (!(MD->isCopyAssignmentOperator() || MD->isMoveAssignmentOperator()))
    return false;

  return MD->isTrivial();
}
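
// For illustration, a hypothetical assignment this predicate accepts: the
// implicitly-defined copy-assignment operator of a POD-like class is trivial,
// so defaultEvalCall() below models it as a simple copy instead of inlining:
//
// \code
//   struct Point { int x, y; };
//   Point a, b;
//   a = b;   // trivial copy assignment => performTrivialCopy()
// \endcode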

void ExprEngine::defaultEvalCall(NodeBuilder &Bldr, ExplodedNode *Pred,
                                 const CallEvent &CallTemplate,
                                 const EvalCallOptions &CallOpts) {
  // Make sure we have the most recent state attached to the call.
  ProgramStateRef State = Pred->getState();
  CallEventRef<> Call = CallTemplate.cloneWithState(State);

  // Special-case trivial assignment operators.
  if (isTrivialObjectAssignment(*Call)) {
    performTrivialCopy(Bldr, Pred, *Call);
    return;
  }

  // Try to inline the call.
  // The origin expression here is just used as a kind of checksum;
  // this should still be safe even for CallEvents that don't come from exprs.
  const Expr *E = Call->getOriginExpr();

  ProgramStateRef InlinedFailedState = getInlineFailedState(State, E);
  if (InlinedFailedState) {
    // If we already tried once and failed, make sure we don't retry later.
    State = InlinedFailedState;
  } else {
    RuntimeDefinition RD = Call->getRuntimeDefinition();
    const Decl *D = RD.getDecl();
    if (shouldInlineCall(*Call, D, Pred, CallOpts)) {
      if (RD.mayHaveOtherDefinitions()) {
        AnalyzerOptions &Options = getAnalysisManager().options;

        // Explore with and without inlining the call.
        if (Options.getIPAMode() == IPAK_DynamicDispatchBifurcate) {
          BifurcateCall(RD.getDispatchRegion(), *Call, D, Bldr, Pred);
          return;
        }

        // Don't inline if we're not in any dynamic dispatch mode.
        if (Options.getIPAMode() != IPAK_DynamicDispatch) {
          conservativeEvalCall(*Call, Bldr, Pred, State);
          return;
        }
      }

      // We are not bifurcating and we do have a Decl, so just inline.
      if (inlineCall(*Call, D, Bldr, Pred, State))
        return;
    }
  }

  // If we can't inline it, handle the return value and invalidate the regions.
  conservativeEvalCall(*Call, Bldr, Pred, State);
}

void ExprEngine::BifurcateCall(const MemRegion *BifurReg,
                               const CallEvent &Call, const Decl *D,
                               NodeBuilder &Bldr, ExplodedNode *Pred) {
  assert(BifurReg);
  BifurReg = BifurReg->StripCasts();

  // Check if we've performed the split already - note, we only want
  // to split the path once per memory region.
  ProgramStateRef State = Pred->getState();
  const unsigned *BState =
                        State->get<DynamicDispatchBifurcationMap>(BifurReg);
  if (BState) {
    // If we are on the "inline path", keep inlining if possible.
    if (*BState == DynamicDispatchModeInlined)
      if (inlineCall(Call, D, Bldr, Pred, State))
        return;
    // If inlining failed, or we are on the path where we assume we
    // don't have enough info about the receiver to inline, conjure the
    // return value and invalidate the regions.
    conservativeEvalCall(Call, Bldr, Pred, State);
    return;
  }

  // If we got here, this is the first time we process a message to this
  // region, so split the path.
  ProgramStateRef IState =
      State->set<DynamicDispatchBifurcationMap>(BifurReg,
                                               DynamicDispatchModeInlined);
  inlineCall(Call, D, Bldr, Pred, IState);

  ProgramStateRef NoIState =
      State->set<DynamicDispatchBifurcationMap>(BifurReg,
                                               DynamicDispatchModeConservative);
  conservativeEvalCall(Call, Bldr, Pred, NoIState);

  NumOfDynamicDispatchPathSplits++;
}

void ExprEngine::VisitReturnStmt(const ReturnStmt *RS, ExplodedNode *Pred,
                                 ExplodedNodeSet &Dst) {
  ExplodedNodeSet dstPreVisit;
  getCheckerManager().runCheckersForPreStmt(dstPreVisit, Pred, RS, *this);

  StmtNodeBuilder B(dstPreVisit, Dst, *currBldrCtx);

  if (RS->getRetValue()) {
    for (ExplodedNodeSet::iterator it = dstPreVisit.begin(),
                                  ei = dstPreVisit.end(); it != ei; ++it) {
      B.generateNode(RS, *it, (*it)->getState());
    }
  }
}