/Users/buildslave/jenkins/workspace/coverage/llvm-project/clang/lib/StaticAnalyzer/Core/ExprEngineCXX.cpp
Line | Count | Source |
1 | | //===- ExprEngineCXX.cpp - ExprEngine support for C++ -----------*- C++ -*-===// |
2 | | // |
3 | | // Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions. |
4 | | // See https://llvm.org/LICENSE.txt for license information. |
5 | | // SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception |
6 | | // |
7 | | //===----------------------------------------------------------------------===// |
8 | | // |
9 | | // This file defines the C++ expression evaluation engine. |
10 | | // |
11 | | //===----------------------------------------------------------------------===// |
12 | | |
13 | | #include "clang/StaticAnalyzer/Core/PathSensitive/ExprEngine.h" |
14 | | #include "clang/Analysis/ConstructionContext.h" |
15 | | #include "clang/AST/DeclCXX.h" |
16 | | #include "clang/AST/StmtCXX.h" |
17 | | #include "clang/AST/ParentMap.h" |
18 | | #include "clang/Basic/PrettyStackTrace.h" |
19 | | #include "clang/StaticAnalyzer/Core/CheckerManager.h" |
20 | | #include "clang/StaticAnalyzer/Core/PathSensitive/AnalysisManager.h" |
21 | | #include "clang/StaticAnalyzer/Core/PathSensitive/CallEvent.h" |
22 | | |
23 | | using namespace clang; |
24 | | using namespace ento; |
25 | | |
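| | // Model a MaterializeTemporaryExpr: make sure the sub-expression's value
| | // lives in a temporary region so that a reference can bind to it.
| | // Illustrative (hypothetical) source that reaches this path:
| | //   const Foo &r = Foo();  // Foo() is materialized into a temporary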
26 | | void ExprEngine::CreateCXXTemporaryObject(const MaterializeTemporaryExpr *ME, |
27 | | ExplodedNode *Pred, |
28 | 9.00k | ExplodedNodeSet &Dst) { |
29 | 9.00k | StmtNodeBuilder Bldr(Pred, Dst, *currBldrCtx); |
30 | 9.00k | const Expr *tempExpr = ME->getSubExpr()->IgnoreParens(); |
31 | 9.00k | ProgramStateRef state = Pred->getState(); |
32 | 9.00k | const LocationContext *LCtx = Pred->getLocationContext(); |
33 | | |
34 | 9.00k | state = createTemporaryRegionIfNeeded(state, LCtx, tempExpr, ME); |
35 | 9.00k | Bldr.generateNode(ME, Pred, state); |
36 | 9.00k | } |
37 | | |
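| | // Model a trivial copy/move constructor or trivial assignment operator
| | // without inlining it: load the aggregate value of the source argument and
| | // bind it directly into the destination ('this') region.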
38 | | // FIXME: This is the sort of code that should eventually live in a Core |
39 | | // checker rather than as a special case in ExprEngine. |
40 | | void ExprEngine::performTrivialCopy(NodeBuilder &Bldr, ExplodedNode *Pred, |
41 | 12.6k | const CallEvent &Call) { |
42 | 12.6k | SVal ThisVal; |
43 | 12.6k | bool AlwaysReturnsLValue; |
44 | 12.6k | const CXXRecordDecl *ThisRD = nullptr; |
45 | 12.6k | if (const CXXConstructorCall *Ctor = dyn_cast<CXXConstructorCall>(&Call)) { |
46 | 12.5k | assert(Ctor->getDecl()->isTrivial()); |
47 | 0 | assert(Ctor->getDecl()->isCopyOrMoveConstructor()); |
48 | 0 | ThisVal = Ctor->getCXXThisVal(); |
49 | 12.5k | ThisRD = Ctor->getDecl()->getParent(); |
50 | 12.5k | AlwaysReturnsLValue = false; |
51 | 110 | } else { |
52 | 110 | assert(cast<CXXMethodDecl>(Call.getDecl())->isTrivial()); |
53 | 0 | assert(cast<CXXMethodDecl>(Call.getDecl())->getOverloadedOperator() == |
54 | 110 | OO_Equal); |
55 | 0 | ThisVal = cast<CXXInstanceCall>(Call).getCXXThisVal(); |
56 | 110 | ThisRD = cast<CXXMethodDecl>(Call.getDecl())->getParent(); |
57 | 110 | AlwaysReturnsLValue = true; |
58 | 110 | } |
59 | | |
60 | 0 | assert(ThisRD); |
61 | 12.6k | if (ThisRD->isEmpty()) { |
62 | | // Do nothing for empty classes. Otherwise it'd retrieve an UnknownVal |
63 | | // and bind it, and RegionStore would think that the actual value
64 | | // in this region at this offset is unknown. |
65 | 1.74k | return; |
66 | 1.74k | } |
67 | | |
68 | 10.9k | const LocationContext *LCtx = Pred->getLocationContext(); |
69 | | |
70 | 10.9k | ExplodedNodeSet Dst; |
71 | 10.9k | Bldr.takeNodes(Pred); |
72 | | |
73 | 10.9k | SVal V = Call.getArgSVal(0); |
74 | | |
75 | | // If the value being copied is not unknown, load from its location to get |
76 | | // an aggregate rvalue. |
77 | 10.9k | if (Optional<Loc> L = V.getAs<Loc>()) |
78 | 10.9k | V = Pred->getState()->getSVal(*L); |
79 | 10 | else |
80 | 10 | assert(V.isUnknownOrUndef()); |
81 | | |
82 | 0 | const Expr *CallExpr = Call.getOriginExpr(); |
83 | 10.9k | evalBind(Dst, CallExpr, Pred, ThisVal, V, true); |
84 | | |
85 | 10.9k | PostStmt PS(CallExpr, LCtx); |
86 | 10.9k | for (ExplodedNodeSet::iterator I = Dst.begin(), E = Dst.end(); |
87 | 21.8k | I != E; ++I) {
88 | 10.9k | ProgramStateRef State = (*I)->getState(); |
89 | 10.9k | if (AlwaysReturnsLValue) |
90 | 79 | State = State->BindExpr(CallExpr, LCtx, ThisVal); |
91 | 10.8k | else |
92 | 10.8k | State = bindReturnValue(Call, LCtx, State); |
93 | 10.9k | Bldr.generateNode(PS, State, *I); |
94 | 10.9k | } |
95 | 10.9k | } |
96 | | |
97 | | |
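| | // Strip array types off `Ty` and descend `LValue` to element 0 at each
| | // array level. The analyzer currently models only the first element's
| | // constructor/destructor; `IsArray` tells the caller that this
| | // approximation was taken.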
98 | | SVal ExprEngine::makeZeroElementRegion(ProgramStateRef State, SVal LValue, |
99 | 10.6k | QualType &Ty, bool &IsArray) { |
100 | 10.6k | SValBuilder &SVB = State->getStateManager().getSValBuilder(); |
101 | 10.6k | ASTContext &Ctx = SVB.getContext(); |
102 | | |
103 | 10.7k | while (const ArrayType *AT = Ctx.getAsArrayType(Ty)) { |
104 | 138 | Ty = AT->getElementType(); |
105 | 138 | LValue = State->getLValue(Ty, SVB.makeZeroArrayIndex(), LValue); |
106 | 138 | IsArray = true; |
107 | 138 | } |
108 | | |
109 | 10.6k | return LValue; |
110 | 10.6k | } |
111 | | |
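| | // Determine where the object constructed by `E` should live by walking the
| | // CFG-provided construction context: a local variable, a member/base
| | // initializer, a new-allocated object, a returned value, a temporary
| | // (possibly elided), or a function argument. Illustrative (hypothetical)
| | // snippets for a few of these contexts:
| | //   Foo f;              // simple variable (DeclStmt)
| | //   Bar() : field() {}  // constructor initializer
| | //   return Foo();       // returned value
| | //   take(Foo());        // argument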
112 | | SVal ExprEngine::computeObjectUnderConstruction( |
113 | | const Expr *E, ProgramStateRef State, const LocationContext *LCtx, |
114 | 33.0k | const ConstructionContext *CC, EvalCallOptions &CallOpts) { |
115 | 33.0k | SValBuilder &SVB = getSValBuilder(); |
116 | 33.0k | MemRegionManager &MRMgr = SVB.getRegionManager(); |
117 | 33.0k | ASTContext &ACtx = SVB.getContext(); |
118 | | |
119 | | // Compute the target region by exploring the construction context. |
120 | 33.0k | if (CC) { |
121 | 32.7k | switch (CC->getKind()) { |
122 | 26 | case ConstructionContext::CXX17ElidedCopyVariableKind: |
123 | 7.38k | case ConstructionContext::SimpleVariableKind: { |
124 | 7.38k | const auto *DSCC = cast<VariableConstructionContext>(CC); |
125 | 7.38k | const auto *DS = DSCC->getDeclStmt(); |
126 | 7.38k | const auto *Var = cast<VarDecl>(DS->getSingleDecl()); |
127 | 7.38k | QualType Ty = Var->getType(); |
128 | 7.38k | return makeZeroElementRegion(State, State->getLValue(Var, LCtx), Ty, |
129 | 7.38k | CallOpts.IsArrayCtorOrDtor); |
130 | 26 | } |
131 | 4 | case ConstructionContext::CXX17ElidedCopyConstructorInitializerKind: |
132 | 2.58k | case ConstructionContext::SimpleConstructorInitializerKind: { |
133 | 2.58k | const auto *ICC = cast<ConstructorInitializerConstructionContext>(CC); |
134 | 2.58k | const auto *Init = ICC->getCXXCtorInitializer(); |
135 | 2.58k | const CXXMethodDecl *CurCtor = cast<CXXMethodDecl>(LCtx->getDecl()); |
136 | 2.58k | Loc ThisPtr = SVB.getCXXThis(CurCtor, LCtx->getStackFrame()); |
137 | 2.58k | SVal ThisVal = State->getSVal(ThisPtr); |
138 | 2.58k | if (Init->isBaseInitializer()) { |
139 | 2 | const auto *ThisReg = cast<SubRegion>(ThisVal.getAsRegion()); |
140 | 2 | const CXXRecordDecl *BaseClass = |
141 | 2 | Init->getBaseClass()->getAsCXXRecordDecl(); |
142 | 2 | const auto *BaseReg = |
143 | 2 | MRMgr.getCXXBaseObjectRegion(BaseClass, ThisReg, |
144 | 2 | Init->isBaseVirtual()); |
145 | 2 | return SVB.makeLoc(BaseReg); |
146 | 2 | } |
147 | 2.57k | if (Init->isDelegatingInitializer()) |
148 | 1 | return ThisVal; |
149 | | |
150 | 2.57k | const ValueDecl *Field; |
151 | 2.57k | SVal FieldVal; |
152 | 2.57k | if (Init->isIndirectMemberInitializer()) { |
153 | 0 | Field = Init->getIndirectMember(); |
154 | 0 | FieldVal = State->getLValue(Init->getIndirectMember(), ThisVal); |
155 | 2.57k | } else { |
156 | 2.57k | Field = Init->getMember(); |
157 | 2.57k | FieldVal = State->getLValue(Init->getMember(), ThisVal); |
158 | 2.57k | } |
159 | | |
160 | 2.57k | QualType Ty = Field->getType(); |
161 | 2.57k | return makeZeroElementRegion(State, FieldVal, Ty, |
162 | 2.57k | CallOpts.IsArrayCtorOrDtor); |
163 | 2.57k | } |
164 | 490 | case ConstructionContext::NewAllocatedObjectKind: { |
165 | 490 | if (AMgr.getAnalyzerOptions().MayInlineCXXAllocator) { |
166 | 487 | const auto *NECC = cast<NewAllocatedObjectConstructionContext>(CC); |
167 | 487 | const auto *NE = NECC->getCXXNewExpr(); |
168 | 487 | SVal V = *getObjectUnderConstruction(State, NE, LCtx); |
169 | 487 | if (const SubRegion *MR = |
170 | 475 | dyn_cast_or_null<SubRegion>(V.getAsRegion())) { |
171 | 475 | if (NE->isArray()) { |
172 | | // TODO: In fact, we need to call the constructor for every |
173 | | // allocated element, not just the first one! |
174 | 41 | CallOpts.IsArrayCtorOrDtor = true; |
175 | 41 | return loc::MemRegionVal(getStoreManager().GetElementZeroRegion( |
176 | 41 | MR, NE->getType()->getPointeeType())); |
177 | 41 | } |
178 | 434 | return V; |
179 | 475 | } |
180 | | // TODO: Detect when the allocator returns a null pointer. |
181 | | // Constructor shall not be called in this case. |
182 | 487 | } |
183 | 15 | break; |
184 | 490 | } |
185 | 2.41k | case ConstructionContext::SimpleReturnedValueKind: |
186 | 2.45k | case ConstructionContext::CXX17ElidedCopyReturnedValueKind: { |
187 | | // The temporary is to be managed by the parent stack frame. |
188 | | // So build it in the parent stack frame if we're not in the |
189 | | // top frame of the analysis. |
190 | 2.45k | const StackFrameContext *SFC = LCtx->getStackFrame(); |
191 | 2.45k | if (const LocationContext *CallerLCtx = SFC->getParent()) { |
192 | 2.39k | auto RTC = (*SFC->getCallSiteBlock())[SFC->getIndex()] |
193 | 2.39k | .getAs<CFGCXXRecordTypedCall>(); |
194 | 2.39k | if (!RTC) { |
195 | | // We were unable to find the correct construction context for the |
196 | | // call in the parent stack frame. This is equivalent to not being |
197 | | // able to find construction context at all. |
198 | 331 | break; |
199 | 331 | } |
200 | 2.06k | if (isa<BlockInvocationContext>(CallerLCtx)) { |
201 | | // Unwrap block invocation contexts. They're mostly part of |
202 | | // the current stack frame. |
203 | 1 | CallerLCtx = CallerLCtx->getParent(); |
204 | 1 | assert(!isa<BlockInvocationContext>(CallerLCtx)); |
205 | 1 | } |
206 | 0 | return computeObjectUnderConstruction( |
207 | 2.06k | cast<Expr>(SFC->getCallSite()), State, CallerLCtx, |
208 | 2.06k | RTC->getConstructionContext(), CallOpts); |
209 | 57 | } else { |
210 | | // We are on the top frame of the analysis. We do not know where the
211 | | // object is returned to. Conjure a symbolic region for the return value.
212 | | // TODO: We probably need a new MemRegion kind to represent the storage |
213 | | // of that SymbolicRegion, so that we could produce a fancy symbol
214 | | // instead of an anonymous conjured symbol. |
215 | | // TODO: Do we need to track the region to avoid having it dead |
216 | | // too early? It does die too early, at least in C++17, but because |
217 | | // putting anything into a SymbolicRegion causes an immediate escape, |
218 | | // it doesn't cause any leak false positives. |
219 | 57 | const auto *RCC = cast<ReturnedValueConstructionContext>(CC); |
220 | | // Make sure that this doesn't coincide with any other symbol |
221 | | // conjured for the returned expression. |
222 | 57 | static const int TopLevelSymRegionTag = 0; |
223 | 57 | const Expr *RetE = RCC->getReturnStmt()->getRetValue(); |
224 | 57 | assert(RetE && "Void returns should not have a construction context"); |
225 | 0 | QualType ReturnTy = RetE->getType(); |
226 | 57 | QualType RegionTy = ACtx.getPointerType(ReturnTy); |
227 | 57 | return SVB.conjureSymbolVal(&TopLevelSymRegionTag, RetE, SFC, RegionTy, |
228 | 57 | currBldrCtx->blockCount()); |
229 | 57 | } |
230 | 2.45k | llvm_unreachable("Unhandled return value construction context!");
231 | 2.45k | } |
232 | 6.61k | case ConstructionContext::ElidedTemporaryObjectKind: { |
233 | 6.61k | assert(AMgr.getAnalyzerOptions().ShouldElideConstructors); |
234 | 0 | const auto *TCC = cast<ElidedTemporaryObjectConstructionContext>(CC); |
235 | | |
236 | | // Support pre-C++17 copy elision. We'll have the elidable copy |
237 | | // constructor in the AST and in the CFG, but we'll skip it |
238 | | // and construct directly into the final object. This call |
239 | | // also sets the CallOpts flags for us. |
240 | | // If the elided copy/move constructor is not supported, there's still |
241 | | // benefit in trying to model the non-elided constructor. |
242 | | // Stash our state before trying to elide, as it'll get overwritten. |
243 | 6.61k | ProgramStateRef PreElideState = State; |
244 | 6.61k | EvalCallOptions PreElideCallOpts = CallOpts; |
245 | | |
246 | 6.61k | SVal V = computeObjectUnderConstruction( |
247 | 6.61k | TCC->getConstructorAfterElision(), State, LCtx, |
248 | 6.61k | TCC->getConstructionContextAfterElision(), CallOpts); |
249 | | |
250 | | // FIXME: This definition of "copy elision has not failed" is unreliable. |
251 | | // It doesn't indicate that the constructor will actually be inlined |
252 | | // later; this is still up to evalCall() to decide. |
253 | 6.61k | if (!CallOpts.IsCtorOrDtorWithImproperlyModeledTargetRegion) |
254 | 6.48k | return V; |
255 | | |
256 | | // Copy elision failed. Revert the changes and proceed as if we have |
257 | | // a simple temporary. |
258 | 131 | CallOpts = PreElideCallOpts; |
259 | 131 | CallOpts.IsElidableCtorThatHasNotBeenElided = true; |
260 | 131 | LLVM_FALLTHROUGH; |
261 | 131 | } |
262 | 2.08k | case ConstructionContext::SimpleTemporaryObjectKind: { |
263 | 2.08k | const auto *TCC = cast<TemporaryObjectConstructionContext>(CC); |
264 | 2.08k | const MaterializeTemporaryExpr *MTE = TCC->getMaterializedTemporaryExpr(); |
265 | | |
266 | 2.08k | CallOpts.IsTemporaryCtorOrDtor = true; |
267 | 2.08k | if (MTE) { |
268 | 1.96k | if (const ValueDecl *VD = MTE->getExtendingDecl()) { |
269 | 183 | assert(MTE->getStorageDuration() != SD_FullExpression); |
270 | 183 | if (!VD->getType()->isReferenceType()) { |
271 | | // We're lifetime-extended by a surrounding aggregate. |
272 | | // Automatic destructors aren't quite working in this case |
273 | | // on the CFG side. We should warn the caller about that. |
274 | | // FIXME: Is there a better way to retrieve this information from |
275 | | // the MaterializeTemporaryExpr? |
276 | 24 | CallOpts.IsTemporaryLifetimeExtendedViaAggregate = true; |
277 | 24 | } |
278 | 183 | } |
279 | | |
280 | 1.96k | if (MTE->getStorageDuration() == SD_Static || |
281 | 1.95k | MTE->getStorageDuration() == SD_Thread) |
282 | 14 | return loc::MemRegionVal(MRMgr.getCXXStaticTempObjectRegion(E)); |
283 | 1.96k | } |
284 | | |
285 | 2.06k | return loc::MemRegionVal(MRMgr.getCXXTempObjectRegion(E, LCtx)); |
286 | 2.08k | } |
287 | 11.2k | case ConstructionContext::ArgumentKind: { |
288 | | // Arguments are technically temporaries. |
289 | 11.2k | CallOpts.IsTemporaryCtorOrDtor = true; |
290 | | |
291 | 11.2k | const auto *ACC = cast<ArgumentConstructionContext>(CC); |
292 | 11.2k | const Expr *E = ACC->getCallLikeExpr(); |
293 | 11.2k | unsigned Idx = ACC->getIndex(); |
294 | | |
295 | 11.2k | CallEventManager &CEMgr = getStateManager().getCallEventManager(); |
296 | 11.2k | auto getArgLoc = [&](CallEventRef<> Caller) -> Optional<SVal> { |
297 | 11.2k | const LocationContext *FutureSFC = |
298 | 11.2k | Caller->getCalleeStackFrame(currBldrCtx->blockCount()); |
299 | | // Return early if we are unable to reliably foresee |
300 | | // the future stack frame. |
301 | 11.2k | if (!FutureSFC) |
302 | 0 | return None; |
303 | | |
304 | | // This should be equivalent to Caller->getDecl() for now, but |
305 | | // FutureSFC->getDecl() is likely to support better stuff (like |
306 | | // virtual functions) earlier. |
307 | 11.2k | const Decl *CalleeD = FutureSFC->getDecl(); |
308 | | |
309 | | // FIXME: Support for variadic arguments is not implemented here yet. |
310 | 11.2k | if (CallEvent::isVariadic(CalleeD)) |
311 | 0 | return None; |
312 | | |
313 | | // Operator arguments do not correspond to operator parameters |
314 | | // because this-argument is implemented as a normal argument in |
315 | | // operator call expressions but not in operator declarations. |
316 | 11.2k | const TypedValueRegion *TVR = Caller->getParameterLocation( |
317 | 11.2k | *Caller->getAdjustedParameterIndex(Idx), currBldrCtx->blockCount()); |
318 | 11.2k | if (!TVR) |
319 | 0 | return None; |
320 | | |
321 | 11.2k | return loc::MemRegionVal(TVR); |
322 | 11.2k | }; |
323 | | |
324 | 11.2k | if (const auto *CE = dyn_cast<CallExpr>(E)) { |
325 | 11.1k | CallEventRef<> Caller = CEMgr.getSimpleCall(CE, State, LCtx); |
326 | 11.1k | if (Optional<SVal> V = getArgLoc(Caller)) |
327 | 11.1k | return *V; |
328 | 0 | else |
329 | 0 | break; |
330 | 58 | } else if (const auto *CCE = dyn_cast<CXXConstructExpr>(E)) { |
331 | | // Don't bother figuring out the target region for the future |
332 | | // constructor because we won't need it. |
333 | 48 | CallEventRef<> Caller = |
334 | 48 | CEMgr.getCXXConstructorCall(CCE, /*Target=*/nullptr, State, LCtx); |
335 | 48 | if (Optional<SVal> V = getArgLoc(Caller)) |
336 | 48 | return *V; |
337 | 0 | else |
338 | 0 | break; |
339 | 10 | } else if (const auto *ME = dyn_cast<ObjCMessageExpr>(E)) { |
340 | 10 | CallEventRef<> Caller = CEMgr.getObjCMethodCall(ME, State, LCtx); |
341 | 10 | if (Optional<SVal> V = getArgLoc(Caller)) |
342 | 10 | return *V; |
343 | 0 | else |
344 | 0 | break; |
345 | 10 | } |
346 | 11.2k | } |
347 | 32.7k | } // switch (CC->getKind()) |
348 | 32.7k | } |
349 | | |
350 | | // If we couldn't find an existing region to construct into, assume we're |
351 | | // constructing a temporary. Notify the caller of our failure. |
352 | 645 | CallOpts.IsCtorOrDtorWithImproperlyModeledTargetRegion = true; |
353 | 645 | return loc::MemRegionVal(MRMgr.getCXXTempObjectRegion(E, LCtx)); |
354 | 33.0k | } |
355 | | |
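| | // Record `V` in the objects-under-construction map of `State`, keyed by the
| | // entity that will later need it (DeclStmt, CXXCtorInitializer,
| | // CXXBindTemporaryExpr, MaterializeTemporaryExpr, or a (call, argument
| | // index) pair), mirroring the dispatch in computeObjectUnderConstruction().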
356 | | ProgramStateRef ExprEngine::updateObjectsUnderConstruction( |
357 | | SVal V, const Expr *E, ProgramStateRef State, const LocationContext *LCtx, |
358 | 32.8k | const ConstructionContext *CC, const EvalCallOptions &CallOpts) { |
359 | 32.8k | if (CallOpts.IsCtorOrDtorWithImproperlyModeledTargetRegion) { |
360 | | // Sounds like we failed to find the target region and therefore |
361 | | // copy elision failed. There's nothing we can do about it here. |
362 | 514 | return State; |
363 | 514 | } |
364 | | |
365 | | // See if we're constructing an existing region by looking at the |
366 | | // current construction context. |
367 | 32.3k | assert(CC && "Computed target region without construction context?"); |
368 | 0 | switch (CC->getKind()) { |
369 | 26 | case ConstructionContext::CXX17ElidedCopyVariableKind: |
370 | 7.37k | case ConstructionContext::SimpleVariableKind: { |
371 | 7.37k | const auto *DSCC = cast<VariableConstructionContext>(CC); |
372 | 7.37k | return addObjectUnderConstruction(State, DSCC->getDeclStmt(), LCtx, V); |
373 | 26 | } |
374 | 4 | case ConstructionContext::CXX17ElidedCopyConstructorInitializerKind: |
375 | 2.57k | case ConstructionContext::SimpleConstructorInitializerKind: { |
376 | 2.57k | const auto *ICC = cast<ConstructorInitializerConstructionContext>(CC); |
377 | 2.57k | const auto *Init = ICC->getCXXCtorInitializer(); |
378 | | // Base and delegating initializers handled above |
379 | 2.57k | assert(Init->isAnyMemberInitializer() && |
380 | 2.57k | "Base and delegating initializers should have been handled by "
381 | 2.57k | "computeObjectUnderConstruction()"); |
382 | 0 | return addObjectUnderConstruction(State, Init, LCtx, V); |
383 | 4 | } |
384 | 475 | case ConstructionContext::NewAllocatedObjectKind: { |
385 | 475 | return State; |
386 | 4 | } |
387 | 2.07k | case ConstructionContext::SimpleReturnedValueKind: |
388 | 2.11k | case ConstructionContext::CXX17ElidedCopyReturnedValueKind: { |
389 | 2.11k | const StackFrameContext *SFC = LCtx->getStackFrame(); |
390 | 2.11k | const LocationContext *CallerLCtx = SFC->getParent(); |
391 | 2.11k | if (!CallerLCtx) { |
392 | | // No extra work is necessary in top frame. |
393 | 57 | return State; |
394 | 57 | } |
395 | | |
396 | 2.06k | auto RTC = (*SFC->getCallSiteBlock())[SFC->getIndex()] |
397 | 2.06k | .getAs<CFGCXXRecordTypedCall>(); |
398 | 2.06k | assert(RTC && "Could not have had a target region without it"); |
399 | 2.06k | if (isa<BlockInvocationContext>(CallerLCtx)) { |
400 | | // Unwrap block invocation contexts. They're mostly part of |
401 | | // the current stack frame. |
402 | 1 | CallerLCtx = CallerLCtx->getParent(); |
403 | 1 | assert(!isa<BlockInvocationContext>(CallerLCtx)); |
404 | 1 | } |
405 | | |
406 | 0 | return updateObjectsUnderConstruction(V, |
407 | 2.06k | cast<Expr>(SFC->getCallSite()), State, CallerLCtx, |
408 | 2.06k | RTC->getConstructionContext(), CallOpts); |
409 | 2.11k | } |
410 | 6.61k | case ConstructionContext::ElidedTemporaryObjectKind: { |
411 | 6.61k | assert(AMgr.getAnalyzerOptions().ShouldElideConstructors); |
412 | 6.61k | if (!CallOpts.IsElidableCtorThatHasNotBeenElided) { |
413 | 6.48k | const auto *TCC = cast<ElidedTemporaryObjectConstructionContext>(CC); |
414 | 6.48k | State = updateObjectsUnderConstruction( |
415 | 6.48k | V, TCC->getConstructorAfterElision(), State, LCtx, |
416 | 6.48k | TCC->getConstructionContextAfterElision(), CallOpts); |
417 | | |
418 | | // Remember that we've elided the constructor. |
419 | 6.48k | State = addObjectUnderConstruction( |
420 | 6.48k | State, TCC->getConstructorAfterElision(), LCtx, V); |
421 | | |
422 | | // Remember that we've elided the destructor. |
423 | 6.48k | if (const auto *BTE = TCC->getCXXBindTemporaryExpr()) |
424 | 588 | State = elideDestructor(State, BTE, LCtx); |
425 | | |
426 | | // Instead of materialization, shamelessly return |
427 | | // the final object destination. |
428 | 6.48k | if (const auto *MTE = TCC->getMaterializedTemporaryExpr()) |
429 | 6.48k | State = addObjectUnderConstruction(State, MTE, LCtx, V); |
430 | | |
431 | 6.48k | return State; |
432 | 6.48k | } |
433 | | // If we decided not to elide the constructor, proceed as if |
434 | | // it's a simple temporary. |
435 | 131 | LLVM_FALLTHROUGH; |
436 | 131 | } |
437 | 2.08k | case ConstructionContext::SimpleTemporaryObjectKind: { |
438 | 2.08k | const auto *TCC = cast<TemporaryObjectConstructionContext>(CC); |
439 | 2.08k | if (const auto *BTE = TCC->getCXXBindTemporaryExpr()) |
440 | 350 | State = addObjectUnderConstruction(State, BTE, LCtx, V); |
441 | | |
442 | 2.08k | if (const auto *MTE = TCC->getMaterializedTemporaryExpr()) |
443 | 1.96k | State = addObjectUnderConstruction(State, MTE, LCtx, V); |
444 | | |
445 | 2.08k | return State; |
446 | 131 | } |
447 | 11.2k | case ConstructionContext::ArgumentKind: { |
448 | 11.2k | const auto *ACC = cast<ArgumentConstructionContext>(CC); |
449 | 11.2k | if (const auto *BTE = ACC->getCXXBindTemporaryExpr()) |
450 | 293 | State = addObjectUnderConstruction(State, BTE, LCtx, V); |
451 | | |
452 | 11.2k | return addObjectUnderConstruction( |
453 | 11.2k | State, {ACC->getCallLikeExpr(), ACC->getIndex()}, LCtx, V); |
454 | 131 | } |
455 | 32.3k | } |
456 | 32.3k | llvm_unreachable("Unhandled construction context!");
457 | 32.3k | } |
458 | | |
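| | // Common entry point for CXXConstructExpr and CXXInheritedCtorInitExpr:
| | // find (or reuse an elided) target region, run pre-statement and pre-call
| | // checkers, evaluate the constructor call, then run argument cleanup and
| | // the post-call/post-statement callbacks. The construction kinds roughly
| | // correspond to (illustrative snippets):
| | //   Foo f;                    // CK_Complete
| | //   struct D : B { D() {} };  // B's subobject ctor runs as CK_NonVirtualBase
| | //   Foo() : Foo(42) {}        // CK_Delegating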
459 | | void ExprEngine::handleConstructor(const Expr *E, |
460 | | ExplodedNode *Pred, |
461 | 29.5k | ExplodedNodeSet &destNodes) { |
462 | 29.5k | const auto *CE = dyn_cast<CXXConstructExpr>(E); |
463 | 29.5k | const auto *CIE = dyn_cast<CXXInheritedCtorInitExpr>(E); |
464 | 29.5k | assert(CE || CIE); |
465 | | |
466 | 0 | const LocationContext *LCtx = Pred->getLocationContext(); |
467 | 29.5k | ProgramStateRef State = Pred->getState(); |
468 | | |
469 | 29.5k | SVal Target = UnknownVal(); |
470 | | |
471 | 29.5k | if (CE) { |
472 | 29.5k | if (Optional<SVal> ElidedTarget = |
473 | 6.48k | getObjectUnderConstruction(State, CE, LCtx)) { |
474 | | // We've previously modeled an elidable constructor by pretending that it |
475 | | // in fact constructs into the correct target. This constructor can |
476 | | // therefore be skipped. |
477 | 6.48k | Target = *ElidedTarget; |
478 | 6.48k | StmtNodeBuilder Bldr(Pred, destNodes, *currBldrCtx); |
479 | 6.48k | State = finishObjectConstruction(State, CE, LCtx); |
480 | 6.48k | if (auto L = Target.getAs<Loc>()) |
481 | 6.48k | State = State->BindExpr(CE, LCtx, State->getSVal(*L, CE->getType())); |
482 | 6.48k | Bldr.generateNode(CE, Pred, State); |
483 | 6.48k | return; |
484 | 6.48k | } |
485 | 29.5k | } |
486 | | |
487 | | // FIXME: Handle arrays, which run the same constructor for every element. |
488 | | // For now, we just run the first constructor (which should still invalidate |
489 | | // the entire array). |
490 | | |
491 | 23.0k | EvalCallOptions CallOpts; |
492 | 23.0k | auto C = getCurrentCFGElement().getAs<CFGConstructor>(); |
493 | 23.0k | assert(C || getCurrentCFGElement().getAs<CFGStmt>()); |
494 | 23.0k | const ConstructionContext *CC = C ? C->getConstructionContext() : nullptr;
495 | | |
496 | 23.0k | const CXXConstructExpr::ConstructionKind CK = |
497 | 23.0k | CE ? CE->getConstructionKind() : CIE->getConstructionKind();
498 | 23.0k | switch (CK) { |
499 | 22.2k | case CXXConstructExpr::CK_Complete: { |
500 | | // Inherited constructors are always base class constructors. |
501 | 22.2k | assert(CE && !CIE && "A complete constructor is inherited?!"); |
502 | | |
503 | | // The target region is found from construction context. |
504 | 0 | std::tie(State, Target) = |
505 | 22.2k | handleConstructionContext(CE, State, LCtx, CC, CallOpts); |
506 | 22.2k | break; |
507 | 0 | } |
508 | 116 | case CXXConstructExpr::CK_VirtualBase: { |
509 | | // Make sure we are not calling virtual base class initializers twice. |
510 | | // Only the most-derived object should initialize virtual base classes. |
511 | 116 | const auto *OuterCtor = dyn_cast_or_null<CXXConstructExpr>( |
512 | 116 | LCtx->getStackFrame()->getCallSite()); |
513 | 116 | assert( |
514 | 116 | (!OuterCtor || |
515 | 116 | OuterCtor->getConstructionKind() == CXXConstructExpr::CK_Complete || |
516 | 116 | OuterCtor->getConstructionKind() == CXXConstructExpr::CK_Delegating) && |
517 | 116 | ("This virtual base should have already been initialized by " |
518 | 116 | "the most derived class!")); |
519 | 0 | (void)OuterCtor; |
520 | 116 | LLVM_FALLTHROUGH; |
521 | 116 | } |
522 | 761 | case CXXConstructExpr::CK_NonVirtualBase: |
523 | | // In C++17, classes with non-virtual bases may be aggregates, so they would |
524 | | // be initialized as aggregates without a constructor call, so we may have |
525 | | // a base class constructed directly into an initializer list without |
526 | | // having the derived-class constructor call on the previous stack frame. |
527 | | // Initializer lists may be nested into more initializer lists that |
528 | | // correspond to surrounding aggregate initializations. |
529 | | // FIXME: For now this code essentially bails out. We need to find the |
530 | | // correct target region and set it. |
531 | | // FIXME: Instead of relying on the ParentMap, we should have the |
532 | | // trigger-statement (InitListExpr in this case) passed down from CFG or |
533 | | // otherwise always available during construction. |
534 | 761 | if (dyn_cast_or_null<InitListExpr>(LCtx->getParentMap().getParent(E))) { |
535 | 24 | MemRegionManager &MRMgr = getSValBuilder().getRegionManager(); |
536 | 24 | Target = loc::MemRegionVal(MRMgr.getCXXTempObjectRegion(E, LCtx)); |
537 | 24 | CallOpts.IsCtorOrDtorWithImproperlyModeledTargetRegion = true; |
538 | 24 | break; |
539 | 24 | } |
540 | 737 | LLVM_FALLTHROUGH; |
541 | 748 | case CXXConstructExpr::CK_Delegating: { |
542 | 748 | const CXXMethodDecl *CurCtor = cast<CXXMethodDecl>(LCtx->getDecl()); |
543 | 748 | Loc ThisPtr = getSValBuilder().getCXXThis(CurCtor, |
544 | 748 | LCtx->getStackFrame()); |
545 | 748 | SVal ThisVal = State->getSVal(ThisPtr); |
546 | | |
547 | 748 | if (CK == CXXConstructExpr::CK_Delegating) { |
548 | 11 | Target = ThisVal; |
549 | 737 | } else { |
550 | | // Cast to the base type. |
551 | 737 | bool IsVirtual = (CK == CXXConstructExpr::CK_VirtualBase); |
552 | 737 | SVal BaseVal = |
553 | 737 | getStoreManager().evalDerivedToBase(ThisVal, E->getType(), IsVirtual); |
554 | 737 | Target = BaseVal; |
555 | 737 | } |
556 | 748 | break; |
557 | 737 | } |
558 | 23.0k | } |
559 | | |
560 | 23.0k | if (State != Pred->getState()) { |
561 | 21.2k | static SimpleProgramPointTag T("ExprEngine", |
562 | 21.2k | "Prepare for object construction"); |
563 | 21.2k | ExplodedNodeSet DstPrepare; |
564 | 21.2k | StmtNodeBuilder BldrPrepare(Pred, DstPrepare, *currBldrCtx); |
565 | 21.2k | BldrPrepare.generateNode(E, Pred, State, &T, ProgramPoint::PreStmtKind); |
566 | 21.2k | assert(DstPrepare.size() <= 1); |
567 | 21.2k | if (DstPrepare.size() == 0) |
568 | 27 | return; |
569 | 21.2k | Pred = *BldrPrepare.begin(); |
570 | 21.2k | } |
571 | | |
572 | 23.0k | const MemRegion *TargetRegion = Target.getAsRegion(); |
573 | 23.0k | CallEventManager &CEMgr = getStateManager().getCallEventManager(); |
574 | 23.0k | CallEventRef<> Call = |
575 | 23.0k | CIE ? (CallEventRef<>)CEMgr.getCXXInheritedConstructorCall( |
576 | 5 | CIE, TargetRegion, State, LCtx) |
577 | 23.0k | : (CallEventRef<>)CEMgr.getCXXConstructorCall( |
578 | 23.0k | CE, TargetRegion, State, LCtx); |
579 | | |
580 | 23.0k | ExplodedNodeSet DstPreVisit; |
581 | 23.0k | getCheckerManager().runCheckersForPreStmt(DstPreVisit, Pred, E, *this); |
582 | | |
583 | 23.0k | ExplodedNodeSet PreInitialized; |
584 | 23.0k | if (CE) { |
585 | | // FIXME: Is it possible and/or useful to do this before PreStmt? |
586 | 23.0k | StmtNodeBuilder Bldr(DstPreVisit, PreInitialized, *currBldrCtx); |
587 | 23.0k | for (ExplodedNodeSet::iterator I = DstPreVisit.begin(), |
588 | 23.0k | E = DstPreVisit.end(); |
589 | 46.0k | I != E; ++I) {
590 | 23.0k | ProgramStateRef State = (*I)->getState(); |
591 | 23.0k | if (CE->requiresZeroInitialization()) { |
592 | | // FIXME: Once we properly handle constructors in new-expressions, we'll |
593 | | // need to invalidate the region before setting a default value, to make |
594 | | // sure there aren't any lingering bindings around. This probably needs |
595 | | // to happen regardless of whether or not the object is zero-initialized |
596 | | // to handle random fields of a placement-initialized object picking up |
597 | | // old bindings. We might only want to do it when we need to, though. |
598 | | // FIXME: This isn't actually correct for arrays -- we need to zero- |
599 | | // initialize the entire array, not just the first element -- but our |
600 | | // handling of arrays everywhere else is weak as well, so this shouldn't |
601 | | // actually make things worse. Placement new makes this tricky as well, |
602 | | // since it's then possible to be initializing one part of a multi- |
603 | | // dimensional array. |
604 | 1.95k | State = State->bindDefaultZero(Target, LCtx); |
605 | 1.95k | } |
606 | | |
607 | 23.0k | Bldr.generateNode(CE, *I, State, /*tag=*/nullptr, |
608 | 23.0k | ProgramPoint::PreStmtKind); |
609 | 23.0k | } |
610 | 5 | } else { |
611 | 5 | PreInitialized = DstPreVisit; |
612 | 5 | } |
613 | | |
614 | 23.0k | ExplodedNodeSet DstPreCall; |
615 | 23.0k | getCheckerManager().runCheckersForPreCall(DstPreCall, PreInitialized, |
616 | 23.0k | *Call, *this); |
617 | | |
618 | 23.0k | ExplodedNodeSet DstEvaluated; |
619 | | |
620 | 23.0k | if (CE && CE->getConstructor()->isTrivial() &&
621 | 15.3k | CE->getConstructor()->isCopyOrMoveConstructor() && |
622 | 12.5k | !CallOpts.IsArrayCtorOrDtor) { |
623 | 12.5k | StmtNodeBuilder Bldr(DstPreCall, DstEvaluated, *currBldrCtx); |
624 | | // FIXME: Handle other kinds of trivial constructors as well. |
625 | 12.5k | for (ExplodedNodeSet::iterator I = DstPreCall.begin(), E = DstPreCall.end(); |
626 | 25.1k | I != E; ++I)
627 | 12.5k | performTrivialCopy(Bldr, *I, *Call); |
628 | | |
629 | 10.4k | } else { |
630 | 10.4k | for (ExplodedNodeSet::iterator I = DstPreCall.begin(), E = DstPreCall.end(); |
631 | 20.9k | I != E; ++I)
632 | 10.4k | getCheckerManager().runCheckersForEvalCall(DstEvaluated, *I, *Call, *this, |
633 | 10.4k | CallOpts); |
634 | 10.4k | } |
635 | | |
636 | | // If the CFG was constructed without elements for temporary destructors |
637 | | // and the just-called constructor created a temporary object then |
638 | | // stop exploration if the temporary object has a noreturn constructor. |
639 | | // This can lose coverage because the destructor, if it were present |
640 | | // in the CFG, would be called at the end of the full expression or |
641 | | // later (for life-time extended temporaries) -- but avoids infeasible |
642 | | // paths when no-return temporary destructors are used for assertions. |
643 | 23.0k | ExplodedNodeSet DstEvaluatedPostProcessed; |
644 | 23.0k | StmtNodeBuilder Bldr(DstEvaluated, DstEvaluatedPostProcessed, *currBldrCtx); |
645 | 23.0k | const AnalysisDeclContext *ADC = LCtx->getAnalysisDeclContext(); |
646 | 23.0k | if (!ADC->getCFGBuildOptions().AddTemporaryDtors) { |
647 | 283 | if (llvm::isa_and_nonnull<CXXTempObjectRegion>(TargetRegion) && |
648 | 110 | cast<CXXConstructorDecl>(Call->getDecl()) |
649 | 110 | ->getParent() |
650 | 4 | ->isAnyDestructorNoReturn()) { |
651 | | |
652 | | // If we've inlined the constructor, then DstEvaluated would be empty. |
653 | | // In this case we still want a sink, which could be implemented |
654 | | // in processCallExit. But we don't have that implemented at the moment, |
655 | | // so if you hit this assertion, see if you can avoid inlining |
656 | | // the respective constructor when analyzer-config cfg-temporary-dtors |
657 | | // is set to false. |
658 | | // Otherwise there's nothing wrong with inlining such constructor. |
659 | 4 | assert(!DstEvaluated.empty() && |
660 | 4 | "We should not have inlined this constructor!"); |
661 | | |
662 | 4 | for (ExplodedNode *N : DstEvaluated) { |
663 | 4 | Bldr.generateSink(E, N, N->getState()); |
664 | 4 | } |
665 | | |
666 | | // There is no need to run the PostCall and PostStmt checker |
667 | | // callbacks because we just generated sinks on all nodes in the
668 | | // frontier. |
669 | 4 | return; |
670 | 4 | } |
671 | 283 | } |
672 | | |
673 | 23.0k | ExplodedNodeSet DstPostArgumentCleanup; |
674 | 23.0k | for (ExplodedNode *I : DstEvaluatedPostProcessed) |
675 | 14.2k | finishArgumentConstruction(DstPostArgumentCleanup, I, *Call); |
676 | | |
677 | | // If there were other constructors called for object-type arguments |
678 | | // of this constructor, clean them up. |
679 | 23.0k | ExplodedNodeSet DstPostCall; |
680 | 23.0k | getCheckerManager().runCheckersForPostCall(DstPostCall, |
681 | 23.0k | DstPostArgumentCleanup, |
682 | 23.0k | *Call, *this); |
683 | 23.0k | getCheckerManager().runCheckersForPostStmt(destNodes, DstPostCall, E, *this); |
684 | 23.0k | } |
685 | | |
686 | | void ExprEngine::VisitCXXConstructExpr(const CXXConstructExpr *CE, |
687 | | ExplodedNode *Pred, |
688 | 29.5k | ExplodedNodeSet &Dst) { |
689 | 29.5k | handleConstructor(CE, Pred, Dst); |
690 | 29.5k | } |
691 | | |
692 | | void ExprEngine::VisitCXXInheritedCtorInitExpr( |
693 | | const CXXInheritedCtorInitExpr *CE, ExplodedNode *Pred, |
694 | 5 | ExplodedNodeSet &Dst) { |
695 | 5 | handleConstructor(CE, Pred, Dst); |
696 | 5 | } |
697 | | |
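| | // Evaluate an implicit or explicit destructor call for an object of
| | // `ObjectType` living in `Dest`, recovering with a dummy temporary region
| | // (or sinking the path) when the target region is unavailable.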
698 | | void ExprEngine::VisitCXXDestructor(QualType ObjectType, |
699 | | const MemRegion *Dest, |
700 | | const Stmt *S, |
701 | | bool IsBaseDtor, |
702 | | ExplodedNode *Pred, |
703 | | ExplodedNodeSet &Dst, |
704 | 1.50k | EvalCallOptions &CallOpts) { |
705 | 1.50k | assert(S && "A destructor without a trigger!"); |
706 | 0 | const LocationContext *LCtx = Pred->getLocationContext(); |
707 | 1.50k | ProgramStateRef State = Pred->getState(); |
708 | | |
709 | 1.50k | const CXXRecordDecl *RecordDecl = ObjectType->getAsCXXRecordDecl(); |
710 | 1.50k | assert(RecordDecl && "Only CXXRecordDecls should have destructors"); |
711 | 0 | const CXXDestructorDecl *DtorDecl = RecordDecl->getDestructor(); |
712 | | // FIXME: There should always be a Decl, otherwise the destructor call |
713 | | // shouldn't have been added to the CFG in the first place. |
714 | 1.50k | if (!DtorDecl) { |
715 | | // Skip the invalid destructor. We cannot simply return because |
716 | | // it would interrupt the analysis instead. |
717 | 1 | static SimpleProgramPointTag T("ExprEngine", "SkipInvalidDestructor"); |
718 | | // FIXME: PostImplicitCall with a null decl may crash elsewhere anyway. |
719 | 1 | PostImplicitCall PP(/*Decl=*/nullptr, S->getEndLoc(), LCtx, &T); |
720 | 1 | NodeBuilder Bldr(Pred, Dst, *currBldrCtx); |
721 | 1 | Bldr.generateNode(PP, Pred->getState(), Pred); |
722 | 1 | return; |
723 | 1 | } |
724 | | |
725 | 1.50k | if (!Dest) { |
726 | | // We're trying to destroy something that is not a region. This may happen |
727 | | // for a variety of reasons (unknown target region, concrete integer instead |
728 | | // of target region, etc.). The current code makes an attempt to recover. |
729 | | // FIXME: We probably don't really need to recover when we're dealing |
730 | | // with concrete integers specifically. |
731 | 70 | CallOpts.IsCtorOrDtorWithImproperlyModeledTargetRegion = true; |
732 | 70 | if (const Expr *E = dyn_cast_or_null<Expr>(S)) { |
733 | 68 | Dest = MRMgr.getCXXTempObjectRegion(E, Pred->getLocationContext()); |
734 | 2 | } else { |
735 | 2 | static SimpleProgramPointTag T("ExprEngine", "SkipInvalidDestructor"); |
736 | 2 | NodeBuilder Bldr(Pred, Dst, *currBldrCtx); |
737 | 2 | Bldr.generateSink(Pred->getLocation().withTag(&T), |
738 | 2 | Pred->getState(), Pred); |
739 | 2 | return; |
740 | 2 | } |
741 | 70 | } |
742 | | |
743 | 1.50k | CallEventManager &CEMgr = getStateManager().getCallEventManager(); |
744 | 1.50k | CallEventRef<CXXDestructorCall> Call = |
745 | 1.50k | CEMgr.getCXXDestructorCall(DtorDecl, S, Dest, IsBaseDtor, State, LCtx); |
746 | | |
747 | 1.50k | PrettyStackTraceLoc CrashInfo(getContext().getSourceManager(), |
748 | 1.50k | Call->getSourceRange().getBegin(), |
749 | 1.50k | "Error evaluating destructor"); |
750 | | |
751 | 1.50k | ExplodedNodeSet DstPreCall; |
752 | 1.50k | getCheckerManager().runCheckersForPreCall(DstPreCall, Pred, |
753 | 1.50k | *Call, *this); |
754 | | |
755 | 1.50k | ExplodedNodeSet DstInvalidated; |
756 | 1.50k | StmtNodeBuilder Bldr(DstPreCall, DstInvalidated, *currBldrCtx); |
757 | 1.50k | for (ExplodedNodeSet::iterator I = DstPreCall.begin(), E = DstPreCall.end(); |
758 | 2.99k | I != E; ++I)
759 | 1.49k | defaultEvalCall(Bldr, *I, *Call, CallOpts); |
760 | | |
761 | 1.50k | getCheckerManager().runCheckersForPostCall(Dst, DstInvalidated, |
762 | 1.50k | *Call, *this); |
763 | 1.50k | } |
764 | | |
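| | // Model the allocator call (operator new) that the CFG emits as a separate
| | // element before the CXXNewExpr itself; e.g. for `new Foo(1)` (illustrative)
| | // the allocator runs first and its return value is stashed until
| | // VisitCXXNewExpr picks it up.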
765 | | void ExprEngine::VisitCXXNewAllocatorCall(const CXXNewExpr *CNE, |
766 | | ExplodedNode *Pred, |
767 | 1.02k | ExplodedNodeSet &Dst) { |
768 | 1.02k | ProgramStateRef State = Pred->getState(); |
769 | 1.02k | const LocationContext *LCtx = Pred->getLocationContext(); |
770 | 1.02k | PrettyStackTraceLoc CrashInfo(getContext().getSourceManager(), |
771 | 1.02k | CNE->getBeginLoc(), |
772 | 1.02k | "Error evaluating New Allocator Call"); |
773 | 1.02k | CallEventManager &CEMgr = getStateManager().getCallEventManager(); |
774 | 1.02k | CallEventRef<CXXAllocatorCall> Call = |
775 | 1.02k | CEMgr.getCXXAllocatorCall(CNE, State, LCtx); |
776 | | |
777 | 1.02k | ExplodedNodeSet DstPreCall; |
778 | 1.02k | getCheckerManager().runCheckersForPreCall(DstPreCall, Pred, |
779 | 1.02k | *Call, *this); |
780 | | |
781 | 1.02k | ExplodedNodeSet DstPostCall; |
782 | 1.02k | StmtNodeBuilder CallBldr(DstPreCall, DstPostCall, *currBldrCtx); |
783 | 1.01k | for (ExplodedNode *I : DstPreCall) { |
784 | | // FIXME: Provide evalCall for checkers? |
785 | 1.01k | defaultEvalCall(CallBldr, I, *Call); |
786 | 1.01k | } |
787 | | // If the call is inlined, DstPostCall will be empty and we bail out now. |
788 | | |
789 | | // Store return value of operator new() for future use, until the actual |
790 | | // CXXNewExpr gets processed. |
791 | 1.02k | ExplodedNodeSet DstPostValue; |
792 | 1.02k | StmtNodeBuilder ValueBldr(DstPostCall, DstPostValue, *currBldrCtx); |
793 | 652 | for (ExplodedNode *I : DstPostCall) { |
794 | | // FIXME: Because CNE serves as the "call site" for the allocator (due to |
795 | | // lack of a better expression in the AST), the conjured return value symbol |
796 | | // is going to be of the same type (C++ object pointer type). Technically |
797 | | // this is not correct because the operator new's prototype always says that |
798 | | // it returns a 'void *'. So we should change the type of the symbol, |
799 | | // and then evaluate the cast over the symbolic pointer from 'void *' to |
800 | | // the object pointer type. But without changing the symbol's type,
801 | | // evaluating the no-op symbolic cast over it breaks too much, so we
802 | | // skip it for now. |
803 | 652 | ProgramStateRef State = I->getState(); |
804 | 652 | SVal RetVal = State->getSVal(CNE, LCtx); |
805 | | |
806 | | // If this allocation function is not declared as non-throwing, failures |
807 | | // /must/ be signalled by exceptions, and thus the return value will never |
808 | | // be NULL. -fno-exceptions does not influence this semantics. |
809 | | // FIXME: GCC has a -fcheck-new option, which forces it to consider the case |
810 | | // where new can return NULL. If we end up supporting that option, we can |
811 | | // consider adding a check for it here. |
812 | | // C++11 [basic.stc.dynamic.allocation]p3. |
813 | 652 | if (const FunctionDecl *FD = CNE->getOperatorNew()) { |
814 | 652 | QualType Ty = FD->getType(); |
815 | 652 | if (const auto *ProtoType = Ty->getAs<FunctionProtoType>()) |
816 | 652 | if (!ProtoType->isNothrow()) |
817 | 628 | State = State->assume(RetVal.castAs<DefinedOrUnknownSVal>(), true); |
818 | 652 | } |
819 | | |
820 | 652 | ValueBldr.generateNode( |
821 | 652 | CNE, I, addObjectUnderConstruction(State, CNE, LCtx, RetVal)); |
822 | 652 | } |
823 | | |
824 | 1.02k | ExplodedNodeSet DstPostPostCallCallback; |
825 | 1.02k | getCheckerManager().runCheckersForPostCall(DstPostPostCallCallback, |
826 | 1.02k | DstPostValue, *Call, *this); |
827 | 652 | for (ExplodedNode *I : DstPostPostCallCallback) { |
828 | 652 | getCheckerManager().runCheckersForNewAllocator(*Call, Dst, I, *this); |
829 | 652 | } |
830 | 1.02k | } |
831 | | |
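| | // Bind the value of the new-expression itself: pick up the pointer stored
| | // by VisitCXXNewAllocatorCall (or conjure one), special-case array new and
| | // placement new, and copy non-record initializers into the new object.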
832 | | void ExprEngine::VisitCXXNewExpr(const CXXNewExpr *CNE, ExplodedNode *Pred, |
833 | 994 | ExplodedNodeSet &Dst) { |
834 | | // FIXME: Much of this should eventually migrate to CXXAllocatorCall. |
835 | | // Also, we need to decide how allocators actually work -- they're not |
836 | | // really part of the CXXNewExpr because they happen BEFORE the |
837 | | // CXXConstructExpr subexpression. See PR12014 for some discussion. |
838 | | |
839 | 994 | unsigned blockCount = currBldrCtx->blockCount(); |
840 | 994 | const LocationContext *LCtx = Pred->getLocationContext(); |
841 | 994 | SVal symVal = UnknownVal(); |
842 | 994 | FunctionDecl *FD = CNE->getOperatorNew(); |
843 | | |
844 | 994 | bool IsStandardGlobalOpNewFunction = |
845 | 994 | FD->isReplaceableGlobalAllocationFunction(); |
846 | | |
847 | 994 | ProgramStateRef State = Pred->getState(); |
848 | | |
849 | | // Retrieve the stored operator new() return value. |
850 | 994 | if (AMgr.getAnalyzerOptions().MayInlineCXXAllocator) { |
851 | 987 | symVal = *getObjectUnderConstruction(State, CNE, LCtx); |
852 | 987 | State = finishObjectConstruction(State, CNE, LCtx); |
853 | 987 | } |
854 | | |
855 | | // We assume all standard global 'operator new' functions allocate memory on
856 | | // the heap. We realize this is an approximation that might not correctly model
857 | | // a custom global allocator. |
858 | 994 | if (symVal.isUnknown()) { |
859 | 7 | if (IsStandardGlobalOpNewFunction) |
860 | 5 | symVal = svalBuilder.getConjuredHeapSymbolVal(CNE, LCtx, blockCount); |
861 | 2 | else |
862 | 2 | symVal = svalBuilder.conjureSymbolVal(nullptr, CNE, LCtx, CNE->getType(), |
863 | 2 | blockCount); |
864 | 7 | } |
865 | | |
866 | 994 | CallEventManager &CEMgr = getStateManager().getCallEventManager(); |
867 | 994 | CallEventRef<CXXAllocatorCall> Call = |
868 | 994 | CEMgr.getCXXAllocatorCall(CNE, State, LCtx); |
869 | | |
870 | 994 | if (!AMgr.getAnalyzerOptions().MayInlineCXXAllocator) { |
871 | | // Invalidate placement args. |
872 | | // FIXME: Once we figure out how we want allocators to work, |
873 | | // we should be using the usual pre-/(default-)eval-/post-call checkers |
874 | | // here. |
875 | 7 | State = Call->invalidateRegions(blockCount); |
876 | 7 | if (!State) |
877 | 0 | return; |
878 | | |
879 | | // If this allocation function is not declared as non-throwing, failures |
880 | | // /must/ be signalled by exceptions, and thus the return value will never |
881 | | // be NULL. -fno-exceptions does not influence this semantics. |
882 | | // FIXME: GCC has a -fcheck-new option, which forces it to consider the case |
883 | | // where new can return NULL. If we end up supporting that option, we can |
884 | | // consider adding a check for it here. |
885 | | // C++11 [basic.stc.dynamic.allocation]p3. |
886 | 7 | if (FD) { |
887 | 7 | QualType Ty = FD->getType(); |
888 | 7 | if (const auto *ProtoType = Ty->getAs<FunctionProtoType>()) |
889 | 7 | if (!ProtoType->isNothrow()) |
890 | 3 | if (auto dSymVal = symVal.getAs<DefinedOrUnknownSVal>()) |
891 | 3 | State = State->assume(*dSymVal, true); |
892 | 7 | } |
893 | 7 | } |
894 | | |
895 | 994 | StmtNodeBuilder Bldr(Pred, Dst, *currBldrCtx); |
896 | | |
897 | 994 | SVal Result = symVal; |
898 | | |
899 | 994 | if (CNE->isArray()) { |
900 | | // FIXME: allocating an array requires simulating the constructors. |
901 | | // For now, just return a symbolicated region. |
902 | 126 | if (const auto *NewReg = cast_or_null<SubRegion>(symVal.getAsRegion())) { |
903 | 121 | QualType ObjTy = CNE->getType()->getPointeeType(); |
904 | 121 | const ElementRegion *EleReg = |
905 | 121 | getStoreManager().GetElementZeroRegion(NewReg, ObjTy); |
906 | 121 | Result = loc::MemRegionVal(EleReg); |
907 | 121 | } |
908 | 126 | State = State->BindExpr(CNE, Pred->getLocationContext(), Result); |
909 | 126 | Bldr.generateNode(CNE, Pred, State); |
910 | 126 | return; |
911 | 126 | } |
912 | | |
913 | | // FIXME: Once we have proper support for CXXConstructExprs inside |
914 | | // CXXNewExpr, we need to make sure that the constructed object is not |
915 | | // immediately invalidated here. (The placement call should happen before |
916 | | // the constructor call anyway.) |
917 | 868 | if (FD && FD->isReservedGlobalPlacementOperator()) { |
918 | | // Non-array placement new should always return the placement location. |
919 | 295 | SVal PlacementLoc = State->getSVal(CNE->getPlacementArg(0), LCtx); |
920 | 295 | Result = svalBuilder.evalCast(PlacementLoc, CNE->getType(), |
921 | 295 | CNE->getPlacementArg(0)->getType()); |
922 | 295 | } |
923 | | |
924 | | // Bind the address of the object, then check to see if we cached out. |
925 | 868 | State = State->BindExpr(CNE, LCtx, Result); |
926 | 868 | ExplodedNode *NewN = Bldr.generateNode(CNE, Pred, State); |
927 | 868 | if (!NewN) |
928 | 0 | return; |
929 | | |
930 | | // If the type is not a record, we won't have a CXXConstructExpr as an |
931 | | // initializer. Copy the value over. |
932 | 868 | if (const Expr *Init = CNE->getInitializer()) { |
933 | 618 | if (!isa<CXXConstructExpr>(Init)) { |
934 | 175 | assert(Bldr.getResults().size() == 1); |
935 | 0 | Bldr.takeNodes(NewN); |
936 | 175 | evalBind(Dst, CNE, NewN, Result, State->getSVal(Init, LCtx), |
937 | 175 | /*FirstInit=*/IsStandardGlobalOpNewFunction); |
938 | 175 | } |
939 | 618 | } |
940 | 868 | } |
941 | | |
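| | // Model a delete-expression as a CXXDeallocatorCall and let the pre-/post-
| | // call checkers do the actual work; the engine does not evaluate the
| | // deallocation itself here.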
942 | | void ExprEngine::VisitCXXDeleteExpr(const CXXDeleteExpr *CDE, |
943 | 436 | ExplodedNode *Pred, ExplodedNodeSet &Dst) { |
944 | | |
945 | 436 | CallEventManager &CEMgr = getStateManager().getCallEventManager(); |
946 | 436 | CallEventRef<CXXDeallocatorCall> Call = CEMgr.getCXXDeallocatorCall( |
947 | 436 | CDE, Pred->getState(), Pred->getLocationContext()); |
948 | | |
949 | 436 | ExplodedNodeSet DstPreCall; |
950 | 436 | getCheckerManager().runCheckersForPreCall(DstPreCall, Pred, *Call, *this); |
951 | | |
952 | 436 | getCheckerManager().runCheckersForPostCall(Dst, DstPreCall, *Call, *this); |
953 | 436 | } |
954 | | |
955 | | void ExprEngine::VisitCXXCatchStmt(const CXXCatchStmt *CS, ExplodedNode *Pred, |
956 | 0 | ExplodedNodeSet &Dst) { |
957 | 0 | const VarDecl *VD = CS->getExceptionDecl(); |
958 | 0 | if (!VD) { |
959 | 0 | Dst.Add(Pred); |
960 | 0 | return; |
961 | 0 | } |
962 | | |
963 | 0 | const LocationContext *LCtx = Pred->getLocationContext(); |
964 | 0 | SVal V = svalBuilder.conjureSymbolVal(CS, LCtx, VD->getType(), |
965 | 0 | currBldrCtx->blockCount()); |
966 | 0 | ProgramStateRef state = Pred->getState(); |
967 | 0 | state = state->bindLoc(state->getLValue(VD, LCtx), V, LCtx); |
968 | |
969 | 0 | StmtNodeBuilder Bldr(Pred, Dst, *currBldrCtx); |
970 | 0 | Bldr.generateNode(CS, Pred, state); |
971 | 0 | } |
972 | | |
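| | // Bind a CXXThisExpr to the value currently stored in the 'this' region of
| | // the enclosing stack frame.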
973 | | void ExprEngine::VisitCXXThisExpr(const CXXThisExpr *TE, ExplodedNode *Pred, |
974 | 9.57k | ExplodedNodeSet &Dst) { |
975 | 9.57k | StmtNodeBuilder Bldr(Pred, Dst, *currBldrCtx); |
976 | | |
977 | | // Get the this object region from StoreManager. |
978 | 9.57k | const LocationContext *LCtx = Pred->getLocationContext(); |
979 | 9.57k | const MemRegion *R = |
980 | 9.57k | svalBuilder.getRegionManager().getCXXThisRegion( |
981 | 9.57k | getContext().getCanonicalType(TE->getType()), |
982 | 9.57k | LCtx); |
983 | | |
984 | 9.57k | ProgramStateRef state = Pred->getState(); |
985 | 9.57k | SVal V = state->getSVal(loc::MemRegionVal(R)); |
986 | 9.57k | Bldr.generateNode(TE, Pred, state->BindExpr(TE, LCtx, V)); |
987 | 9.57k | } |
988 | | |
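| | // Model a lambda expression: create a temporary region for the closure
| | // object and bind each capture into the corresponding field of the closure
| | // class; e.g. (illustrative) in `[x, &y] { return x + y; }` the fields hold
| | // a copy of x and a reference to y.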
989 | | void ExprEngine::VisitLambdaExpr(const LambdaExpr *LE, ExplodedNode *Pred, |
990 | 197 | ExplodedNodeSet &Dst) { |
991 | 197 | const LocationContext *LocCtxt = Pred->getLocationContext(); |
992 | | |
993 | | // Get the region of the lambda itself. |
994 | 197 | const MemRegion *R = svalBuilder.getRegionManager().getCXXTempObjectRegion( |
995 | 197 | LE, LocCtxt); |
996 | 197 | SVal V = loc::MemRegionVal(R); |
997 | | |
998 | 197 | ProgramStateRef State = Pred->getState(); |
999 | | |
1000 | | // If we created a new MemRegion for the lambda, we should explicitly bind |
1001 | | // the captures. |
1002 | 197 | CXXRecordDecl::field_iterator CurField = LE->getLambdaClass()->field_begin(); |
1003 | 197 | for (LambdaExpr::const_capture_init_iterator i = LE->capture_init_begin(), |
1004 | 197 | e = LE->capture_init_end(); |
1005 | 398 | i != e; ++i, ++CurField) {
1006 | 201 | FieldDecl *FieldForCapture = *CurField; |
1007 | 201 | SVal FieldLoc = State->getLValue(FieldForCapture, V); |
1008 | | |
1009 | 201 | SVal InitVal; |
1010 | 201 | if (!FieldForCapture->hasCapturedVLAType()) { |
1011 | 199 | Expr *InitExpr = *i; |
1012 | 199 | assert(InitExpr && "Capture missing initialization expression"); |
1013 | 0 | InitVal = State->getSVal(InitExpr, LocCtxt); |
1014 | 2 | } else { |
1015 | | // The field stores the length of a captured variable-length array. |
1016 | | // These captures don't have initialization expressions; instead we |
1017 | | // get the length from the VLAType size expression. |
1018 | 2 | Expr *SizeExpr = FieldForCapture->getCapturedVLAType()->getSizeExpr(); |
1019 | 2 | InitVal = State->getSVal(SizeExpr, LocCtxt); |
1020 | 2 | } |
1021 | | |
1022 | 0 | State = State->bindLoc(FieldLoc, InitVal, LocCtxt); |
1023 | 201 | } |
1024 | | |
1025 | | // Decay the Loc into an RValue, because there might be a |
1026 | | // MaterializeTemporaryExpr node above this one which expects the bound value |
1027 | | // to be an RValue. |
1028 | 197 | SVal LambdaRVal = State->getSVal(R); |
1029 | | |
1030 | 197 | ExplodedNodeSet Tmp; |
1031 | 197 | StmtNodeBuilder Bldr(Pred, Tmp, *currBldrCtx); |
1032 | | // FIXME: is this the right program point kind? |
1033 | 197 | Bldr.generateNode(LE, Pred, |
1034 | 197 | State->BindExpr(LE, LocCtxt, LambdaRVal), |
1035 | 197 | nullptr, ProgramPoint::PostLValueKind); |
1036 | | |
1037 | | // FIXME: Move all post/pre visits to ::Visit(). |
1038 | 197 | getCheckerManager().runCheckersForPostStmt(Dst, Tmp, LE, *this); |
1039 | 197 | } |