/Users/buildslave/jenkins/workspace/coverage/llvm-project/clang/lib/StaticAnalyzer/Checkers/StackAddrEscapeChecker.cpp
Line | Count | Source (jump to first uncovered line) |
1 | | //=== StackAddrEscapeChecker.cpp ----------------------------------*- C++ -*--// |
2 | | // |
3 | | // Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions. |
4 | | // See https://llvm.org/LICENSE.txt for license information. |
5 | | // SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception |
6 | | // |
7 | | //===----------------------------------------------------------------------===// |
8 | | // |
9 | | // This file defines stack address leak checker, which checks if an invalid |
10 | | // stack address is stored into a global or heap location. See CERT DCL30-C. |
11 | | // |
12 | | //===----------------------------------------------------------------------===// |
13 | | |
14 | | #include "clang/AST/ExprCXX.h" |
15 | | #include "clang/Basic/SourceManager.h" |
16 | | #include "clang/StaticAnalyzer/Checkers/BuiltinCheckerRegistration.h" |
17 | | #include "clang/StaticAnalyzer/Core/BugReporter/BugType.h" |
18 | | #include "clang/StaticAnalyzer/Core/Checker.h" |
19 | | #include "clang/StaticAnalyzer/Core/CheckerManager.h" |
20 | | #include "clang/StaticAnalyzer/Core/PathSensitive/CallEvent.h" |
21 | | #include "clang/StaticAnalyzer/Core/PathSensitive/CheckerContext.h" |
22 | | #include "clang/StaticAnalyzer/Core/PathSensitive/ProgramState.h" |
23 | | #include "llvm/ADT/SmallString.h" |
24 | | #include "llvm/Support/raw_ostream.h" |
25 | | using namespace clang; |
26 | | using namespace ento; |
27 | | |
namespace {
// Checker that reports escapes of stack-allocated memory addresses:
// returning one to the caller, capturing one in a block that outlives the
// frame, or storing one into a global/heap location. See CERT DCL30-C.
class StackAddrEscapeChecker
    : public Checker<check::PreCall, check::PreStmt<ReturnStmt>,
                     check::EndFunction> {
  // Lazily-initialized identifier for "dispatch_semaphore_t"; mutable
  // because it is cached from within const checker callbacks.
  mutable IdentifierInfo *dispatch_semaphore_tII = nullptr;
  // Bug types are created lazily, on first report of each kind.
  mutable std::unique_ptr<BugType> BT_stackleak;
  mutable std::unique_ptr<BugType> BT_returnstack;
  mutable std::unique_ptr<BugType> BT_capturedstackasync;
  mutable std::unique_ptr<BugType> BT_capturedstackret;

public:
  // The two user-visible checker front-ends multiplexed onto this class.
  enum CheckKind {
    CK_StackAddrEscapeChecker,
    CK_StackAddrAsyncEscapeChecker,
    CK_NumCheckKinds
  };

  // Per-front-end enable flags and registered names (set by the
  // REGISTER_CHECKER machinery at the bottom of this file).
  bool ChecksEnabled[CK_NumCheckKinds] = {false};
  CheckerNameRef CheckNames[CK_NumCheckKinds];

  void checkPreCall(const CallEvent &Call, CheckerContext &C) const;
  void checkPreStmt(const ReturnStmt *RS, CheckerContext &C) const;
  void checkEndFunction(const ReturnStmt *RS, CheckerContext &Ctx) const;

private:
  void checkReturnedBlockCaptures(const BlockDataRegion &B,
                                  CheckerContext &C) const;
  void checkAsyncExecutedBlockCaptures(const BlockDataRegion &B,
                                       CheckerContext &C) const;
  void EmitStackError(CheckerContext &C, const MemRegion *R,
                      const Expr *RetE) const;
  bool isSemaphoreCaptured(const BlockDecl &B) const;
  static SourceRange genName(raw_ostream &os, const MemRegion *R,
                             ASTContext &Ctx);
  static SmallVector<const MemRegion *, 4>
  getCapturedStackRegions(const BlockDataRegion &B, CheckerContext &C);
  static bool isNotInCurrentFrame(const MemRegion *R, CheckerContext &C);
};
} // namespace
67 | | |
68 | | SourceRange StackAddrEscapeChecker::genName(raw_ostream &os, const MemRegion *R, |
69 | 115 | ASTContext &Ctx) { |
70 | | // Get the base region, stripping away fields and elements. |
71 | 115 | R = R->getBaseRegion(); |
72 | 115 | SourceManager &SM = Ctx.getSourceManager(); |
73 | 115 | SourceRange range; |
74 | 115 | os << "Address of "; |
75 | | |
76 | | // Check if the region is a compound literal. |
77 | 115 | if (const auto *CR = dyn_cast<CompoundLiteralRegion>(R)) { |
78 | 3 | const CompoundLiteralExpr *CL = CR->getLiteralExpr(); |
79 | 3 | os << "stack memory associated with a compound literal " |
80 | 3 | "declared on line " |
81 | 3 | << SM.getExpansionLineNumber(CL->getBeginLoc()) << " returned to caller"; |
82 | 3 | range = CL->getSourceRange(); |
83 | 112 | } else if (const auto *AR = dyn_cast<AllocaRegion>(R)) { |
84 | 9 | const Expr *ARE = AR->getExpr(); |
85 | 9 | SourceLocation L = ARE->getBeginLoc(); |
86 | 9 | range = ARE->getSourceRange(); |
87 | 9 | os << "stack memory allocated by call to alloca() on line " |
88 | 9 | << SM.getExpansionLineNumber(L); |
89 | 103 | } else if (const auto *BR = dyn_cast<BlockDataRegion>(R)) { |
90 | 4 | const BlockDecl *BD = BR->getCodeRegion()->getDecl(); |
91 | 4 | SourceLocation L = BD->getBeginLoc(); |
92 | 4 | range = BD->getSourceRange(); |
93 | 4 | os << "stack-allocated block declared on line " |
94 | 4 | << SM.getExpansionLineNumber(L); |
95 | 99 | } else if (const auto *VR = dyn_cast<VarRegion>(R)) { |
96 | 66 | os << "stack memory associated with local variable '" << VR->getString() |
97 | 66 | << '\''; |
98 | 66 | range = VR->getDecl()->getSourceRange(); |
99 | 66 | } else if (const auto *33 LER33 = dyn_cast<CXXLifetimeExtendedObjectRegion>(R)) { |
100 | 3 | QualType Ty = LER->getValueType().getLocalUnqualifiedType(); |
101 | 3 | os << "stack memory associated with temporary object of type '"; |
102 | 3 | Ty.print(os, Ctx.getPrintingPolicy()); |
103 | 3 | os << "' lifetime extended by local variable"; |
104 | 3 | if (const IdentifierInfo *ID = LER->getExtendingDecl()->getIdentifier()) |
105 | 3 | os << " '" << ID->getName() << '\''; |
106 | 3 | range = LER->getExpr()->getSourceRange(); |
107 | 30 | } else if (const auto *TOR = dyn_cast<CXXTempObjectRegion>(R)) { |
108 | 30 | QualType Ty = TOR->getValueType().getLocalUnqualifiedType(); |
109 | 30 | os << "stack memory associated with temporary object of type '"; |
110 | 30 | Ty.print(os, Ctx.getPrintingPolicy()); |
111 | 30 | os << "'"; |
112 | 30 | range = TOR->getExpr()->getSourceRange(); |
113 | 30 | } else { |
114 | 0 | llvm_unreachable("Invalid region in ReturnStackAddressChecker."); |
115 | 0 | } |
116 | | |
117 | 115 | return range; |
118 | 115 | } |
119 | | |
120 | | bool StackAddrEscapeChecker::isNotInCurrentFrame(const MemRegion *R, |
121 | 6.66k | CheckerContext &C) { |
122 | 6.66k | const StackSpaceRegion *S = cast<StackSpaceRegion>(R->getMemorySpace()); |
123 | 6.66k | return S->getStackFrame() != C.getStackFrame(); |
124 | 6.66k | } |
125 | | |
126 | 17 | bool StackAddrEscapeChecker::isSemaphoreCaptured(const BlockDecl &B) const { |
127 | 17 | if (!dispatch_semaphore_tII) |
128 | 3 | dispatch_semaphore_tII = &B.getASTContext().Idents.get("dispatch_semaphore_t"); |
129 | 20 | for (const auto &C : B.captures()) { |
130 | 20 | const auto *T = C.getVariable()->getType()->getAs<TypedefType>(); |
131 | 20 | if (T && T->getDecl()->getIdentifier() == dispatch_semaphore_tII1 ) |
132 | 1 | return true; |
133 | 20 | } |
134 | 16 | return false; |
135 | 17 | } |
136 | | |
137 | | SmallVector<const MemRegion *, 4> |
138 | | StackAddrEscapeChecker::getCapturedStackRegions(const BlockDataRegion &B, |
139 | 30 | CheckerContext &C) { |
140 | 30 | SmallVector<const MemRegion *, 4> Regions; |
141 | 31 | for (auto Var : B.referenced_vars()) { |
142 | 31 | SVal Val = C.getState()->getSVal(Var.getCapturedRegion()); |
143 | 31 | const MemRegion *Region = Val.getAsRegion(); |
144 | 31 | if (Region && isa<StackSpaceRegion>(Region->getMemorySpace())19 ) |
145 | 18 | Regions.push_back(Region); |
146 | 31 | } |
147 | 30 | return Regions; |
148 | 30 | } |
149 | | |
150 | | void StackAddrEscapeChecker::EmitStackError(CheckerContext &C, |
151 | | const MemRegion *R, |
152 | 35 | const Expr *RetE) const { |
153 | 35 | ExplodedNode *N = C.generateNonFatalErrorNode(); |
154 | 35 | if (!N) |
155 | 1 | return; |
156 | 34 | if (!BT_returnstack) |
157 | 10 | BT_returnstack = std::make_unique<BugType>( |
158 | 10 | CheckNames[CK_StackAddrEscapeChecker], |
159 | 10 | "Return of address to stack-allocated memory"); |
160 | | // Generate a report for this bug. |
161 | 34 | SmallString<128> buf; |
162 | 34 | llvm::raw_svector_ostream os(buf); |
163 | 34 | SourceRange range = genName(os, R, C.getASTContext()); |
164 | 34 | os << " returned to caller"; |
165 | 34 | auto report = |
166 | 34 | std::make_unique<PathSensitiveBugReport>(*BT_returnstack, os.str(), N); |
167 | 34 | report->addRange(RetE->getSourceRange()); |
168 | 34 | if (range.isValid()) |
169 | 34 | report->addRange(range); |
170 | 34 | C.emitReport(std::move(report)); |
171 | 34 | } |
172 | | |
173 | | void StackAddrEscapeChecker::checkAsyncExecutedBlockCaptures( |
174 | 17 | const BlockDataRegion &B, CheckerContext &C) const { |
175 | | // There is a not-too-uncommon idiom |
176 | | // where a block passed to dispatch_async captures a semaphore |
177 | | // and then the thread (which called dispatch_async) is blocked on waiting |
178 | | // for the completion of the execution of the block |
179 | | // via dispatch_semaphore_wait. To avoid false-positives (for now) |
180 | | // we ignore all the blocks which have captured |
181 | | // a variable of the type "dispatch_semaphore_t". |
182 | 17 | if (isSemaphoreCaptured(*B.getDecl())) |
183 | 1 | return; |
184 | 16 | for (const MemRegion *Region : getCapturedStackRegions(B, C)) { |
185 | | // The block passed to dispatch_async may capture another block |
186 | | // created on the stack. However, there is no leak in this situaton, |
187 | | // no matter if ARC or no ARC is enabled: |
188 | | // dispatch_async copies the passed "outer" block (via Block_copy) |
189 | | // and if the block has captured another "inner" block, |
190 | | // the "inner" block will be copied as well. |
191 | 13 | if (isa<BlockDataRegion>(Region)) |
192 | 1 | continue; |
193 | 12 | ExplodedNode *N = C.generateNonFatalErrorNode(); |
194 | 12 | if (!N) |
195 | 0 | continue; |
196 | 12 | if (!BT_capturedstackasync) |
197 | 2 | BT_capturedstackasync = std::make_unique<BugType>( |
198 | 2 | CheckNames[CK_StackAddrAsyncEscapeChecker], |
199 | 2 | "Address of stack-allocated memory is captured"); |
200 | 12 | SmallString<128> Buf; |
201 | 12 | llvm::raw_svector_ostream Out(Buf); |
202 | 12 | SourceRange Range = genName(Out, Region, C.getASTContext()); |
203 | 12 | Out << " is captured by an asynchronously-executed block"; |
204 | 12 | auto Report = std::make_unique<PathSensitiveBugReport>( |
205 | 12 | *BT_capturedstackasync, Out.str(), N); |
206 | 12 | if (Range.isValid()) |
207 | 12 | Report->addRange(Range); |
208 | 12 | C.emitReport(std::move(Report)); |
209 | 12 | } |
210 | 16 | } |
211 | | |
212 | | void StackAddrEscapeChecker::checkReturnedBlockCaptures( |
213 | 14 | const BlockDataRegion &B, CheckerContext &C) const { |
214 | 14 | for (const MemRegion *Region : getCapturedStackRegions(B, C)) { |
215 | 5 | if (isNotInCurrentFrame(Region, C)) |
216 | 1 | continue; |
217 | 4 | ExplodedNode *N = C.generateNonFatalErrorNode(); |
218 | 4 | if (!N) |
219 | 0 | continue; |
220 | 4 | if (!BT_capturedstackret) |
221 | 3 | BT_capturedstackret = std::make_unique<BugType>( |
222 | 3 | CheckNames[CK_StackAddrEscapeChecker], |
223 | 3 | "Address of stack-allocated memory is captured"); |
224 | 4 | SmallString<128> Buf; |
225 | 4 | llvm::raw_svector_ostream Out(Buf); |
226 | 4 | SourceRange Range = genName(Out, Region, C.getASTContext()); |
227 | 4 | Out << " is captured by a returned block"; |
228 | 4 | auto Report = std::make_unique<PathSensitiveBugReport>(*BT_capturedstackret, |
229 | 4 | Out.str(), N); |
230 | 4 | if (Range.isValid()) |
231 | 4 | Report->addRange(Range); |
232 | 4 | C.emitReport(std::move(Report)); |
233 | 4 | } |
234 | 14 | } |
235 | | |
236 | | void StackAddrEscapeChecker::checkPreCall(const CallEvent &Call, |
237 | 102k | CheckerContext &C) const { |
238 | 102k | if (!ChecksEnabled[CK_StackAddrAsyncEscapeChecker]) |
239 | 99.7k | return; |
240 | 2.49k | if (!Call.isGlobalCFunction("dispatch_after") && |
241 | 2.49k | !Call.isGlobalCFunction("dispatch_async")2.49k ) |
242 | 2.48k | return; |
243 | 53 | for (unsigned Idx = 0, NumArgs = Call.getNumArgs(); 17 Idx < NumArgs; ++Idx36 ) { |
244 | 36 | if (const BlockDataRegion *B = dyn_cast_or_null<BlockDataRegion>( |
245 | 36 | Call.getArgSVal(Idx).getAsRegion())) |
246 | 17 | checkAsyncExecutedBlockCaptures(*B, C); |
247 | 36 | } |
248 | 17 | } |
249 | | |
250 | | void StackAddrEscapeChecker::checkPreStmt(const ReturnStmt *RS, |
251 | 23.3k | CheckerContext &C) const { |
252 | 23.3k | if (!ChecksEnabled[CK_StackAddrEscapeChecker]) |
253 | 0 | return; |
254 | | |
255 | 23.3k | const Expr *RetE = RS->getRetValue(); |
256 | 23.3k | if (!RetE) |
257 | 3.06k | return; |
258 | 20.2k | RetE = RetE->IgnoreParens(); |
259 | | |
260 | 20.2k | SVal V = C.getSVal(RetE); |
261 | 20.2k | const MemRegion *R = V.getAsRegion(); |
262 | 20.2k | if (!R) |
263 | 11.7k | return; |
264 | | |
265 | 8.51k | if (const BlockDataRegion *B = dyn_cast<BlockDataRegion>(R)) |
266 | 14 | checkReturnedBlockCaptures(*B, C); |
267 | | |
268 | 8.51k | if (!isa<StackSpaceRegion>(R->getMemorySpace()) || isNotInCurrentFrame(R, C)6.64k ) |
269 | 8.47k | return; |
270 | | |
271 | | // Returning a record by value is fine. (In this case, the returned |
272 | | // expression will be a copy-constructor, possibly wrapped in an |
273 | | // ExprWithCleanups node.) |
274 | 39 | if (const ExprWithCleanups *Cleanup = dyn_cast<ExprWithCleanups>(RetE)) |
275 | 13 | RetE = Cleanup->getSubExpr(); |
276 | 39 | if (isa<CXXConstructExpr>(RetE) && RetE->getType()->isRecordType()0 ) |
277 | 0 | return; |
278 | | |
279 | | // The CK_CopyAndAutoreleaseBlockObject cast causes the block to be copied |
280 | | // so the stack address is not escaping here. |
281 | 39 | if (const auto *ICE = dyn_cast<ImplicitCastExpr>(RetE)) { |
282 | 19 | if (isa<BlockDataRegion>(R) && |
283 | 19 | ICE->getCastKind() == CK_CopyAndAutoreleaseBlockObject6 ) { |
284 | 4 | return; |
285 | 4 | } |
286 | 19 | } |
287 | | |
288 | 35 | EmitStackError(C, R, RetE); |
289 | 35 | } |
290 | | |
291 | | void StackAddrEscapeChecker::checkEndFunction(const ReturnStmt *RS, |
292 | 63.1k | CheckerContext &Ctx) const { |
293 | 63.1k | if (!ChecksEnabled[CK_StackAddrEscapeChecker]) |
294 | 0 | return; |
295 | | |
296 | 63.1k | ProgramStateRef State = Ctx.getState(); |
297 | | |
298 | | // Iterate over all bindings to global variables and see if it contains |
299 | | // a memory region in the stack space. |
300 | 63.1k | class CallBack : public StoreManager::BindingsHandler { |
301 | 63.1k | private: |
302 | 63.1k | CheckerContext &Ctx; |
303 | 63.1k | const StackFrameContext *PoppedFrame; |
304 | | |
305 | | /// Look for stack variables referring to popped stack variables. |
306 | | /// Returns true only if it found some dangling stack variables |
307 | | /// referred by an other stack variable from different stack frame. |
308 | 63.1k | bool checkForDanglingStackVariable(const MemRegion *Referrer, |
309 | 87.3k | const MemRegion *Referred) { |
310 | 87.3k | const auto *ReferrerMemSpace = |
311 | 87.3k | Referrer->getMemorySpace()->getAs<StackSpaceRegion>(); |
312 | 87.3k | const auto *ReferredMemSpace = |
313 | 87.3k | Referred->getMemorySpace()->getAs<StackSpaceRegion>(); |
314 | | |
315 | 87.3k | if (!ReferrerMemSpace || !ReferredMemSpace86.4k ) |
316 | 29.7k | return false; |
317 | | |
318 | 57.6k | const auto *ReferrerFrame = ReferrerMemSpace->getStackFrame(); |
319 | 57.6k | const auto *ReferredFrame = ReferredMemSpace->getStackFrame(); |
320 | | |
321 | 57.6k | if (ReferrerMemSpace && ReferredMemSpace) { |
322 | 57.6k | if (ReferredFrame == PoppedFrame && |
323 | 57.6k | ReferrerFrame->isParentOf(PoppedFrame)119 ) { |
324 | 54 | V.emplace_back(Referrer, Referred); |
325 | 54 | return true; |
326 | 54 | } |
327 | 57.6k | } |
328 | 57.5k | return false; |
329 | 57.6k | } |
330 | | |
331 | 63.1k | public: |
332 | 63.1k | SmallVector<std::pair<const MemRegion *, const MemRegion *>, 10> V; |
333 | | |
334 | 63.1k | CallBack(CheckerContext &CC) : Ctx(CC), PoppedFrame(CC.getStackFrame()) {} |
335 | | |
336 | 63.1k | bool HandleBinding(StoreManager &SMgr, Store S, const MemRegion *Region, |
337 | 183k | SVal Val) override { |
338 | 183k | const MemRegion *VR = Val.getAsRegion(); |
339 | 183k | if (!VR) |
340 | 95.7k | return true; |
341 | | |
342 | 87.3k | if (checkForDanglingStackVariable(Region, VR)) |
343 | 54 | return true; |
344 | | |
345 | | // Check the globals for the same. |
346 | 87.2k | if (!isa<GlobalsSpaceRegion>(Region->getMemorySpace())) |
347 | 87.1k | return true; |
348 | 139 | if (VR && VR->hasStackStorage() && !isNotInCurrentFrame(VR, Ctx)11 ) |
349 | 11 | V.emplace_back(Region, VR); |
350 | 139 | return true; |
351 | 87.2k | } |
352 | 63.1k | }; |
353 | | |
354 | 63.1k | CallBack Cb(Ctx); |
355 | 63.1k | State->getStateManager().getStoreManager().iterBindings(State->getStore(), |
356 | 63.1k | Cb); |
357 | | |
358 | 63.1k | if (Cb.V.empty()) |
359 | 63.0k | return; |
360 | | |
361 | | // Generate an error node. |
362 | 48 | ExplodedNode *N = Ctx.generateNonFatalErrorNode(State); |
363 | 48 | if (!N) |
364 | 0 | return; |
365 | | |
366 | 48 | if (!BT_stackleak) |
367 | 12 | BT_stackleak = |
368 | 12 | std::make_unique<BugType>(CheckNames[CK_StackAddrEscapeChecker], |
369 | 12 | "Stack address stored into global variable"); |
370 | | |
371 | 65 | for (const auto &P : Cb.V) { |
372 | 65 | const MemRegion *Referrer = P.first; |
373 | 65 | const MemRegion *Referred = P.second; |
374 | | |
375 | | // Generate a report for this bug. |
376 | 65 | const StringRef CommonSuffix = |
377 | 65 | "upon returning to the caller. This will be a dangling reference"; |
378 | 65 | SmallString<128> Buf; |
379 | 65 | llvm::raw_svector_ostream Out(Buf); |
380 | 65 | const SourceRange Range = genName(Out, Referred, Ctx.getASTContext()); |
381 | | |
382 | 65 | if (isa<CXXTempObjectRegion, CXXLifetimeExtendedObjectRegion>(Referrer)) { |
383 | 12 | Out << " is still referred to by a temporary object on the stack " |
384 | 12 | << CommonSuffix; |
385 | 12 | auto Report = |
386 | 12 | std::make_unique<PathSensitiveBugReport>(*BT_stackleak, Out.str(), N); |
387 | 12 | Ctx.emitReport(std::move(Report)); |
388 | 12 | return; |
389 | 12 | } |
390 | | |
391 | 53 | const StringRef ReferrerMemorySpace = [](const MemSpaceRegion *Space) { |
392 | 53 | if (isa<StaticGlobalSpaceRegion>(Space)) |
393 | 4 | return "static"; |
394 | 49 | if (isa<GlobalsSpaceRegion>(Space)) |
395 | 7 | return "global"; |
396 | 42 | assert(isa<StackSpaceRegion>(Space)); |
397 | 42 | return "stack"; |
398 | 42 | }(Referrer->getMemorySpace()); |
399 | | |
400 | | // This cast supposed to succeed. |
401 | 53 | const VarRegion *ReferrerVar = cast<VarRegion>(Referrer->getBaseRegion()); |
402 | 53 | const std::string ReferrerVarName = |
403 | 53 | ReferrerVar->getDecl()->getDeclName().getAsString(); |
404 | | |
405 | 53 | Out << " is still referred to by the " << ReferrerMemorySpace |
406 | 53 | << " variable '" << ReferrerVarName << "' " << CommonSuffix; |
407 | 53 | auto Report = |
408 | 53 | std::make_unique<PathSensitiveBugReport>(*BT_stackleak, Out.str(), N); |
409 | 53 | if (Range.isValid()) |
410 | 53 | Report->addRange(Range); |
411 | | |
412 | 53 | Ctx.emitReport(std::move(Report)); |
413 | 53 | } |
414 | 48 | } |
415 | | |
// Registers the shared base checker instance; the user-visible front-ends
// (registered below via REGISTER_CHECKER) enable flags on this instance.
void ento::registerStackAddrEscapeBase(CheckerManager &mgr) {
  mgr.registerChecker<StackAddrEscapeChecker>();
}
419 | | |
// The base checker is unconditionally available.
bool ento::shouldRegisterStackAddrEscapeBase(const CheckerManager &mgr) {
  return true;
}
423 | | |
424 | | #define REGISTER_CHECKER(name) \ |
425 | 1.35k | void ento::register##name(CheckerManager &Mgr) { \ |
426 | 1.35k | StackAddrEscapeChecker *Chk = Mgr.getChecker<StackAddrEscapeChecker>(); \ |
427 | 1.35k | Chk->ChecksEnabled[StackAddrEscapeChecker::CK_##name] = true; \ |
428 | 1.35k | Chk->CheckNames[StackAddrEscapeChecker::CK_##name] = \ |
429 | 1.35k | Mgr.getCurrentCheckerName(); \ |
430 | 1.35k | } \ clang::ento::registerStackAddrEscapeChecker(clang::ento::CheckerManager&) Line | Count | Source | 425 | 1.27k | void ento::register##name(CheckerManager &Mgr) { \ | 426 | 1.27k | StackAddrEscapeChecker *Chk = Mgr.getChecker<StackAddrEscapeChecker>(); \ | 427 | 1.27k | Chk->ChecksEnabled[StackAddrEscapeChecker::CK_##name] = true; \ | 428 | 1.27k | Chk->CheckNames[StackAddrEscapeChecker::CK_##name] = \ | 429 | 1.27k | Mgr.getCurrentCheckerName(); \ | 430 | 1.27k | } \ |
clang::ento::registerStackAddrAsyncEscapeChecker(clang::ento::CheckerManager&) Line | Count | Source | 425 | 77 | void ento::register##name(CheckerManager &Mgr) { \ | 426 | 77 | StackAddrEscapeChecker *Chk = Mgr.getChecker<StackAddrEscapeChecker>(); \ | 427 | 77 | Chk->ChecksEnabled[StackAddrEscapeChecker::CK_##name] = true; \ | 428 | 77 | Chk->CheckNames[StackAddrEscapeChecker::CK_##name] = \ | 429 | 77 | Mgr.getCurrentCheckerName(); \ | 430 | 77 | } \ |
|
431 | | \ |
432 | 2.70k | bool ento::shouldRegister##name(const CheckerManager &mgr) { return true; } clang::ento::shouldRegisterStackAddrEscapeChecker(clang::ento::CheckerManager const&) Line | Count | Source | 432 | 2.55k | bool ento::shouldRegister##name(const CheckerManager &mgr) { return true; } |
clang::ento::shouldRegisterStackAddrAsyncEscapeChecker(clang::ento::CheckerManager const&) Line | Count | Source | 432 | 154 | bool ento::shouldRegister##name(const CheckerManager &mgr) { return true; } |
|
433 | | |
434 | | REGISTER_CHECKER(StackAddrEscapeChecker) |
435 | | REGISTER_CHECKER(StackAddrAsyncEscapeChecker) |