/Users/buildslave/jenkins/workspace/coverage/llvm-project/clang/lib/Analysis/UninitializedValues.cpp
Line | Count | Source (jump to first uncovered line) |
1 | | //===- UninitializedValues.cpp - Find Uninitialized Values ----------------===// |
2 | | // |
3 | | // Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions. |
4 | | // See https://llvm.org/LICENSE.txt for license information. |
5 | | // SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception |
6 | | // |
7 | | //===----------------------------------------------------------------------===// |
8 | | // |
9 | | // This file implements uninitialized values analysis for source-level CFGs. |
10 | | // |
11 | | //===----------------------------------------------------------------------===// |
12 | | |
13 | | #include "clang/Analysis/Analyses/UninitializedValues.h" |
14 | | #include "clang/AST/Attr.h" |
15 | | #include "clang/AST/Decl.h" |
16 | | #include "clang/AST/DeclBase.h" |
17 | | #include "clang/AST/Expr.h" |
18 | | #include "clang/AST/OperationKinds.h" |
19 | | #include "clang/AST/Stmt.h" |
20 | | #include "clang/AST/StmtObjC.h" |
21 | | #include "clang/AST/StmtVisitor.h" |
22 | | #include "clang/AST/Type.h" |
23 | | #include "clang/Analysis/Analyses/PostOrderCFGView.h" |
24 | | #include "clang/Analysis/AnalysisDeclContext.h" |
25 | | #include "clang/Analysis/CFG.h" |
26 | | #include "clang/Analysis/DomainSpecific/ObjCNoReturn.h" |
27 | | #include "clang/Analysis/FlowSensitive/DataflowWorklist.h" |
28 | | #include "clang/Basic/LLVM.h" |
29 | | #include "llvm/ADT/BitVector.h" |
30 | | #include "llvm/ADT/DenseMap.h" |
31 | | #include "llvm/ADT/None.h" |
32 | | #include "llvm/ADT/Optional.h" |
33 | | #include "llvm/ADT/PackedVector.h" |
34 | | #include "llvm/ADT/SmallBitVector.h" |
35 | | #include "llvm/ADT/SmallVector.h" |
36 | | #include "llvm/Support/Casting.h" |
37 | | #include <algorithm> |
38 | | #include <cassert> |
39 | | |
40 | | using namespace clang; |
41 | | |
42 | | #define DEBUG_LOGGING 0 |
43 | | |
44 | 140k | static bool isTrackedVar(const VarDecl *vd, const DeclContext *dc) { |
45 | 140k | if (vd->isLocalVarDecl() && !vd->hasGlobalStorage()21.1k && |
46 | 140k | !vd->isExceptionVariable()21.0k && !vd->isInitCapture()21.0k && |
47 | 140k | !vd->isImplicit()21.0k && vd->getDeclContext() == dc16.5k ) { |
48 | 11.1k | QualType ty = vd->getType(); |
49 | 11.1k | return ty->isScalarType() || ty->isVectorType()1.83k || ty->isRecordType()1.56k ; |
50 | 11.1k | } |
51 | 129k | return false; |
52 | 140k | } |
53 | | |
54 | | //------------------------------------------------------------------------====// |
55 | | // DeclToIndex: a mapping from Decls we track to value indices. |
56 | | //====------------------------------------------------------------------------// |
57 | | |
58 | | namespace { |
59 | | |
60 | | class DeclToIndex { |
61 | | llvm::DenseMap<const VarDecl *, unsigned> map; |
62 | | |
63 | | public: |
64 | 58.3k | DeclToIndex() = default; |
65 | | |
66 | | /// Compute the actual mapping from declarations to bits. |
67 | | void computeMap(const DeclContext &dc); |
68 | | |
69 | | /// Return the number of declarations in the map. |
70 | 119k | unsigned size() const { return map.size(); } |
71 | | |
72 | | /// Returns the bit vector index for a given declaration. |
73 | | Optional<unsigned> getValueIndex(const VarDecl *d) const; |
74 | | }; |
75 | | |
76 | | } // namespace |
77 | | |
78 | 58.3k | void DeclToIndex::computeMap(const DeclContext &dc) { |
79 | 58.3k | unsigned count = 0; |
80 | 58.3k | DeclContext::specific_decl_iterator<VarDecl> I(dc.decls_begin()), |
81 | 58.3k | E(dc.decls_end()); |
82 | 177k | for ( ; I != E; ++I119k ) { |
83 | 119k | const VarDecl *vd = *I; |
84 | 119k | if (isTrackedVar(vd, &dc)) |
85 | 1.93k | map[vd] = count++; |
86 | 119k | } |
87 | 58.3k | } |
88 | | |
89 | 12.2k | Optional<unsigned> DeclToIndex::getValueIndex(const VarDecl *d) const { |
90 | 12.2k | llvm::DenseMap<const VarDecl *, unsigned>::const_iterator I = map.find(d); |
91 | 12.2k | if (I == map.end()) |
92 | 0 | return None; |
93 | 12.2k | return I->second; |
94 | 12.2k | } |
95 | | |
96 | | //------------------------------------------------------------------------====// |
97 | | // CFGBlockValues: dataflow values for CFG blocks. |
98 | | //====------------------------------------------------------------------------// |
99 | | |
100 | | // These values are defined in such a way that a merge can be done using |
101 | | // a bitwise OR. |
102 | | enum Value { Unknown = 0x0, /* 00 */ |
103 | | Initialized = 0x1, /* 01 */ |
104 | | Uninitialized = 0x2, /* 10 */ |
105 | | MayUninitialized = 0x3 /* 11 */ }; |
106 | | |
107 | 10.6k | static bool isUninitialized(const Value v) { |
108 | 10.6k | return v >= Uninitialized; |
109 | 10.6k | } |
110 | | |
111 | 4.69k | static bool isAlwaysUninit(const Value v) { |
112 | 4.69k | return v == Uninitialized; |
113 | 4.69k | } |
114 | | |
115 | | namespace { |
116 | | |
117 | | using ValueVector = llvm::PackedVector<Value, 2, llvm::SmallBitVector>; |
118 | | |
119 | | class CFGBlockValues { |
120 | | const CFG &cfg; |
121 | | SmallVector<ValueVector, 8> vals; |
122 | | ValueVector scratch; |
123 | | DeclToIndex declToIndex; |
124 | | |
125 | | public: |
126 | | CFGBlockValues(const CFG &cfg); |
127 | | |
128 | 2.74k | unsigned getNumEntries() const { return declToIndex.size(); } |
129 | | |
130 | | void computeSetOfDeclarations(const DeclContext &dc); |
131 | | |
132 | 28.3k | ValueVector &getValueVector(const CFGBlock *block) { |
133 | 28.3k | return vals[block->getBlockID()]; |
134 | 28.3k | } |
135 | | |
136 | | void setAllScratchValues(Value V); |
137 | | void mergeIntoScratch(ValueVector const &source, bool isFirst); |
138 | | bool updateValueVectorWithScratch(const CFGBlock *block); |
139 | | |
140 | 58.3k | bool hasNoDeclarations() const { |
141 | 58.3k | return declToIndex.size() == 0; |
142 | 58.3k | } |
143 | | |
144 | | void resetScratch(); |
145 | | |
146 | | ValueVector::reference operator[](const VarDecl *vd); |
147 | | |
148 | | Value getValue(const CFGBlock *block, const CFGBlock *dstBlock, |
149 | 2.64k | const VarDecl *vd) { |
150 | 2.64k | const Optional<unsigned> &idx = declToIndex.getValueIndex(vd); |
151 | 2.64k | assert(idx); |
152 | 0 | return getValueVector(block)[idx.value()]; |
153 | 2.64k | } |
154 | | }; |
155 | | |
156 | | } // namespace |
157 | | |
158 | 58.3k | CFGBlockValues::CFGBlockValues(const CFG &c) : cfg(c), vals(0) {} |
159 | | |
160 | 58.3k | void CFGBlockValues::computeSetOfDeclarations(const DeclContext &dc) { |
161 | 58.3k | declToIndex.computeMap(dc); |
162 | 58.3k | unsigned decls = declToIndex.size(); |
163 | 58.3k | scratch.resize(decls); |
164 | 58.3k | unsigned n = cfg.getNumBlockIDs(); |
165 | 58.3k | if (!n) |
166 | 0 | return; |
167 | 58.3k | vals.resize(n); |
168 | 58.3k | for (auto &val : vals) |
169 | 180k | val.resize(decls); |
170 | 58.3k | } |
171 | | |
172 | | #if DEBUG_LOGGING |
173 | | static void printVector(const CFGBlock *block, ValueVector &bv, |
174 | | unsigned num) { |
175 | | llvm::errs() << block->getBlockID() << " :"; |
176 | | for (const auto &i : bv) |
177 | | llvm::errs() << ' ' << i; |
178 | | llvm::errs() << " : " << num << '\n'; |
179 | | } |
180 | | #endif |
181 | | |
182 | 9 | void CFGBlockValues::setAllScratchValues(Value V) { |
183 | 20 | for (unsigned I = 0, E = scratch.size(); I != E; ++I11 ) |
184 | 11 | scratch[I] = V; |
185 | 9 | } |
186 | | |
187 | | void CFGBlockValues::mergeIntoScratch(ValueVector const &source, |
188 | 13.3k | bool isFirst) { |
189 | 13.3k | if (isFirst) |
190 | 10.9k | scratch = source; |
191 | 2.31k | else |
192 | 2.31k | scratch |= source; |
193 | 13.3k | } |
194 | | |
195 | 10.9k | bool CFGBlockValues::updateValueVectorWithScratch(const CFGBlock *block) { |
196 | 10.9k | ValueVector &dst = getValueVector(block); |
197 | 10.9k | bool changed = (dst != scratch); |
198 | 10.9k | if (changed) |
199 | 8.55k | dst = scratch; |
200 | | #if DEBUG_LOGGING |
201 | | printVector(block, scratch, 0); |
202 | | #endif |
203 | 10.9k | return changed; |
204 | 10.9k | } |
205 | | |
206 | 10.9k | void CFGBlockValues::resetScratch() { |
207 | 10.9k | scratch.reset(); |
208 | 10.9k | } |
209 | | |
210 | 9.61k | ValueVector::reference CFGBlockValues::operator[](const VarDecl *vd) { |
211 | 9.61k | const Optional<unsigned> &idx = declToIndex.getValueIndex(vd); |
212 | 9.61k | assert(idx); |
213 | 0 | return scratch[idx.value()]; |
214 | 9.61k | } |
215 | | |
216 | | //------------------------------------------------------------------------====// |
217 | | // Classification of DeclRefExprs as use or initialization. |
218 | | //====------------------------------------------------------------------------// |
219 | | |
220 | | namespace { |
221 | | |
222 | | class FindVarResult { |
223 | | const VarDecl *vd; |
224 | | const DeclRefExpr *dr; |
225 | | |
226 | | public: |
227 | 7.75k | FindVarResult(const VarDecl *vd, const DeclRefExpr *dr) : vd(vd), dr(dr) {} |
228 | | |
229 | 6.54k | const DeclRefExpr *getDeclRefExpr() const { return dr; } |
230 | 1.21k | const VarDecl *getDecl() const { return vd; } |
231 | | }; |
232 | | |
233 | | } // namespace |
234 | | |
235 | 8.97k | static const Expr *stripCasts(ASTContext &C, const Expr *Ex) { |
236 | 8.97k | while (Ex) { |
237 | 8.97k | Ex = Ex->IgnoreParenNoopCasts(C); |
238 | 8.97k | if (const auto *CE = dyn_cast<CastExpr>(Ex)) { |
239 | 566 | if (CE->getCastKind() == CK_LValueBitCast) { |
240 | 2 | Ex = CE->getSubExpr(); |
241 | 2 | continue; |
242 | 2 | } |
243 | 566 | } |
244 | 8.97k | break; |
245 | 8.97k | } |
246 | 8.97k | return Ex; |
247 | 8.97k | } |
248 | | |
249 | | /// If E is an expression comprising a reference to a single variable, find that |
250 | | /// variable. |
251 | 7.75k | static FindVarResult findVar(const Expr *E, const DeclContext *DC) { |
252 | 7.75k | if (const auto *DRE = |
253 | 7.75k | dyn_cast<DeclRefExpr>(stripCasts(DC->getParentASTContext(), E))) |
254 | 7.20k | if (const auto *VD = dyn_cast<VarDecl>(DRE->getDecl())) |
255 | 7.20k | if (isTrackedVar(VD, DC)) |
256 | 3.76k | return FindVarResult(VD, DRE); |
257 | 3.99k | return FindVarResult(nullptr, nullptr); |
258 | 7.75k | } |
259 | | |
260 | | namespace { |
261 | | |
262 | | /// Classify each DeclRefExpr as an initialization or a use. Any |
263 | | /// DeclRefExpr which isn't explicitly classified will be assumed to have |
264 | | /// escaped the analysis and will be treated as an initialization. |
265 | | class ClassifyRefs : public StmtVisitor<ClassifyRefs> { |
266 | | public: |
267 | | enum Class { |
268 | | Init, |
269 | | Use, |
270 | | SelfInit, |
271 | | ConstRefUse, |
272 | | Ignore |
273 | | }; |
274 | | |
275 | | private: |
276 | | const DeclContext *DC; |
277 | | llvm::DenseMap<const DeclRefExpr *, Class> Classification; |
278 | | |
279 | 9.97k | bool isTrackedVar(const VarDecl *VD) const { |
280 | 9.97k | return ::isTrackedVar(VD, DC); |
281 | 9.97k | } |
282 | | |
283 | | void classify(const Expr *E, Class C); |
284 | | |
285 | | public: |
286 | 1.37k | ClassifyRefs(AnalysisDeclContext &AC) : DC(cast<DeclContext>(AC.getDecl())) {} |
287 | | |
288 | | void VisitDeclStmt(DeclStmt *DS); |
289 | | void VisitUnaryOperator(UnaryOperator *UO); |
290 | | void VisitBinaryOperator(BinaryOperator *BO); |
291 | | void VisitCallExpr(CallExpr *CE); |
292 | | void VisitCastExpr(CastExpr *CE); |
293 | | void VisitOMPExecutableDirective(OMPExecutableDirective *ED); |
294 | | |
295 | 26.4k | void operator()(Stmt *S) { Visit(S); } |
296 | | |
297 | 15.3k | Class get(const DeclRefExpr *DRE) const { |
298 | 15.3k | llvm::DenseMap<const DeclRefExpr*, Class>::const_iterator I |
299 | 15.3k | = Classification.find(DRE); |
300 | 15.3k | if (I != Classification.end()) |
301 | 6.71k | return I->second; |
302 | | |
303 | 8.61k | const auto *VD = dyn_cast<VarDecl>(DRE->getDecl()); |
304 | 8.61k | if (!VD || !isTrackedVar(VD)6.82k ) |
305 | 8.06k | return Ignore; |
306 | | |
307 | 557 | return Init; |
308 | 8.61k | } |
309 | | }; |
310 | | |
311 | | } // namespace |
312 | | |
313 | 4.38k | static const DeclRefExpr *getSelfInitExpr(VarDecl *VD) { |
314 | 4.38k | if (VD->getType()->isRecordType()) |
315 | 450 | return nullptr; |
316 | 3.93k | if (Expr *Init = VD->getInit()) { |
317 | 991 | const auto *DRE = |
318 | 991 | dyn_cast<DeclRefExpr>(stripCasts(VD->getASTContext(), Init)); |
319 | 991 | if (DRE && DRE->getDecl() == VD99 ) |
320 | 28 | return DRE; |
321 | 991 | } |
322 | 3.90k | return nullptr; |
323 | 3.93k | } |
324 | | |
325 | 6.71k | void ClassifyRefs::classify(const Expr *E, Class C) { |
326 | | // The result of a ?: could also be an lvalue. |
327 | 6.71k | E = E->IgnoreParens(); |
328 | 6.71k | if (const auto *CO = dyn_cast<ConditionalOperator>(E)) { |
329 | 14 | classify(CO->getTrueExpr(), C); |
330 | 14 | classify(CO->getFalseExpr(), C); |
331 | 14 | return; |
332 | 14 | } |
333 | | |
334 | 6.70k | if (const auto *BCO = dyn_cast<BinaryConditionalOperator>(E)) { |
335 | 4 | classify(BCO->getFalseExpr(), C); |
336 | 4 | return; |
337 | 4 | } |
338 | | |
339 | 6.69k | if (const auto *OVE = dyn_cast<OpaqueValueExpr>(E)) { |
340 | 4 | classify(OVE->getSourceExpr(), C); |
341 | 4 | return; |
342 | 4 | } |
343 | | |
344 | 6.69k | if (const auto *ME = dyn_cast<MemberExpr>(E)) { |
345 | 97 | if (const auto *VD = dyn_cast<VarDecl>(ME->getMemberDecl())) { |
346 | 2 | if (!VD->isStaticDataMember()) |
347 | 0 | classify(ME->getBase(), C); |
348 | 2 | } |
349 | 97 | return; |
350 | 97 | } |
351 | | |
352 | 6.59k | if (const auto *BO = dyn_cast<BinaryOperator>(E)) { |
353 | 55 | switch (BO->getOpcode()) { |
354 | 1 | case BO_PtrMemD: |
355 | 1 | case BO_PtrMemI: |
356 | 1 | classify(BO->getLHS(), C); |
357 | 1 | return; |
358 | 26 | case BO_Comma: |
359 | 26 | classify(BO->getRHS(), C); |
360 | 26 | return; |
361 | 28 | default: |
362 | 28 | return; |
363 | 55 | } |
364 | 55 | } |
365 | | |
366 | 6.54k | FindVarResult Var = findVar(E, DC); |
367 | 6.54k | if (const DeclRefExpr *DRE = Var.getDeclRefExpr()) |
368 | 3.22k | Classification[DRE] = std::max(Classification[DRE], C); |
369 | 6.54k | } |
370 | | |
371 | 3.14k | void ClassifyRefs::VisitDeclStmt(DeclStmt *DS) { |
372 | 3.14k | for (auto *DI : DS->decls()) { |
373 | 3.14k | auto *VD = dyn_cast<VarDecl>(DI); |
374 | 3.14k | if (VD && isTrackedVar(VD)) |
375 | 1.93k | if (const DeclRefExpr *DRE = getSelfInitExpr(VD)) |
376 | 9 | Classification[DRE] = SelfInit; |
377 | 3.14k | } |
378 | 3.14k | } |
379 | | |
380 | 3.05k | void ClassifyRefs::VisitBinaryOperator(BinaryOperator *BO) { |
381 | | // Ignore the evaluation of a DeclRefExpr on the LHS of an assignment. If this |
382 | | // is not a compound-assignment, we will treat it as initializing the variable |
383 | | // when TransferFunctions visits it. A compound-assignment does not affect |
384 | | // whether a variable is uninitialized, and there's no point counting it as a |
385 | | // use. |
386 | 3.05k | if (BO->isCompoundAssignmentOp()) |
387 | 155 | classify(BO->getLHS(), Use); |
388 | 2.90k | else if (BO->getOpcode() == BO_Assign || BO->getOpcode() == BO_Comma2.19k ) |
389 | 765 | classify(BO->getLHS(), Ignore); |
390 | 3.05k | } |
391 | | |
392 | 1.38k | void ClassifyRefs::VisitUnaryOperator(UnaryOperator *UO) { |
393 | | // Increment and decrement are uses despite there being no lvalue-to-rvalue |
394 | | // conversion. |
395 | 1.38k | if (UO->isIncrementDecrementOp()) |
396 | 996 | classify(UO->getSubExpr(), Use); |
397 | 1.38k | } |
398 | | |
399 | 1.10k | void ClassifyRefs::VisitOMPExecutableDirective(OMPExecutableDirective *ED) { |
400 | 1.10k | for (Stmt *S : OMPExecutableDirective::used_clauses_children(ED->clauses())) |
401 | 1.06k | classify(cast<Expr>(S), Use); |
402 | 1.10k | } |
403 | | |
404 | 850 | static bool isPointerToConst(const QualType &QT) { |
405 | 850 | return QT->isAnyPointerType() && QT->getPointeeType().isConstQualified()456 ; |
406 | 850 | } |
407 | | |
408 | 1.28k | static bool hasTrivialBody(CallExpr *CE) { |
409 | 1.28k | if (FunctionDecl *FD = CE->getDirectCallee()) { |
410 | 1.27k | if (FunctionTemplateDecl *FTD = FD->getPrimaryTemplate()) |
411 | 32 | return FTD->getTemplatedDecl()->hasTrivialBody(); |
412 | 1.24k | return FD->hasTrivialBody(); |
413 | 1.27k | } |
414 | 6 | return false; |
415 | 1.28k | } |
416 | | |
417 | 1.31k | void ClassifyRefs::VisitCallExpr(CallExpr *CE) { |
418 | | // Classify arguments to std::move as used. |
419 | 1.31k | if (CE->isCallToStdMove()) { |
420 | | // RecordTypes are handled in SemaDeclCXX.cpp. |
421 | 26 | if (!CE->getArg(0)->getType()->isRecordType()) |
422 | 17 | classify(CE->getArg(0), Use); |
423 | 26 | return; |
424 | 26 | } |
425 | 1.28k | bool isTrivialBody = hasTrivialBody(CE); |
426 | | // If a value is passed by const pointer to a function, |
427 | | // we should not assume that it is initialized by the call, and we |
428 | | // conservatively do not assume that it is used. |
429 | | // If a value is passed by const reference to a function, |
430 | | // it should already be initialized. |
431 | 1.28k | for (CallExpr::arg_iterator I = CE->arg_begin(), E = CE->arg_end(); |
432 | 2.42k | I != E; ++I1.14k ) { |
433 | 1.14k | if ((*I)->isGLValue()) { |
434 | 293 | if ((*I)->getType().isConstQualified()) |
435 | 25 | classify((*I), isTrivialBody ? Ignore2 : ConstRefUse23 ); |
436 | 850 | } else if (isPointerToConst((*I)->getType())) { |
437 | 209 | const Expr *Ex = stripCasts(DC->getParentASTContext(), *I); |
438 | 209 | const auto *UO = dyn_cast<UnaryOperator>(Ex); |
439 | 209 | if (UO && UO->getOpcode() == UO_AddrOf2 ) |
440 | 2 | Ex = UO->getSubExpr(); |
441 | 209 | classify(Ex, Ignore); |
442 | 209 | } |
443 | 1.14k | } |
444 | 1.28k | } |
445 | | |
446 | 6.78k | void ClassifyRefs::VisitCastExpr(CastExpr *CE) { |
447 | 6.78k | if (CE->getCastKind() == CK_LValueToRValue) |
448 | 3.22k | classify(CE->getSubExpr(), Use); |
449 | 3.55k | else if (const auto *CSE = dyn_cast<CStyleCastExpr>(CE)) { |
450 | 365 | if (CSE->getType()->isVoidType()) { |
451 | | // Squelch any detected load of an uninitialized value if |
452 | | // we cast it to void. |
453 | | // e.g. (void) x; |
454 | 190 | classify(CSE->getSubExpr(), Ignore); |
455 | 190 | } |
456 | 365 | } |
457 | 6.78k | } |
458 | | |
459 | | //------------------------------------------------------------------------====// |
460 | | // Transfer function for uninitialized values analysis. |
461 | | //====------------------------------------------------------------------------// |
462 | | |
463 | | namespace { |
464 | | |
465 | | class TransferFunctions : public StmtVisitor<TransferFunctions> { |
466 | | CFGBlockValues &vals; |
467 | | const CFG &cfg; |
468 | | const CFGBlock *block; |
469 | | AnalysisDeclContext ∾ |
470 | | const ClassifyRefs &classification; |
471 | | ObjCNoReturn objCNoRet; |
472 | | UninitVariablesHandler &handler; |
473 | | |
474 | | public: |
475 | | TransferFunctions(CFGBlockValues &vals, const CFG &cfg, |
476 | | const CFGBlock *block, AnalysisDeclContext &ac, |
477 | | const ClassifyRefs &classification, |
478 | | UninitVariablesHandler &handler) |
479 | | : vals(vals), cfg(cfg), block(block), ac(ac), |
480 | | classification(classification), objCNoRet(ac.getASTContext()), |
481 | 10.9k | handler(handler) {} |
482 | | |
483 | | void reportUse(const Expr *ex, const VarDecl *vd); |
484 | | void reportConstRefUse(const Expr *ex, const VarDecl *vd); |
485 | | |
486 | | void VisitBinaryOperator(BinaryOperator *bo); |
487 | | void VisitBlockExpr(BlockExpr *be); |
488 | | void VisitCallExpr(CallExpr *ce); |
489 | | void VisitDeclRefExpr(DeclRefExpr *dr); |
490 | | void VisitDeclStmt(DeclStmt *ds); |
491 | | void VisitGCCAsmStmt(GCCAsmStmt *as); |
492 | | void VisitObjCForCollectionStmt(ObjCForCollectionStmt *FS); |
493 | | void VisitObjCMessageExpr(ObjCMessageExpr *ME); |
494 | | void VisitOMPExecutableDirective(OMPExecutableDirective *ED); |
495 | | |
496 | 4.28k | bool isTrackedVar(const VarDecl *vd) { |
497 | 4.28k | return ::isTrackedVar(vd, cast<DeclContext>(ac.getDecl())); |
498 | 4.28k | } |
499 | | |
500 | 1.21k | FindVarResult findVar(const Expr *ex) { |
501 | 1.21k | return ::findVar(ex, cast<DeclContext>(ac.getDecl())); |
502 | 1.21k | } |
503 | | |
504 | 4.65k | UninitUse getUninitUse(const Expr *ex, const VarDecl *vd, Value v) { |
505 | 4.65k | UninitUse Use(ex, isAlwaysUninit(v)); |
506 | | |
507 | 4.65k | assert(isUninitialized(v)); |
508 | 4.65k | if (Use.getKind() == UninitUse::Always) |
509 | 4.38k | return Use; |
510 | | |
511 | | // If an edge which leads unconditionally to this use did not initialize |
512 | | // the variable, we can say something stronger than 'may be uninitialized': |
513 | | // we can say 'either it's used uninitialized or you have dead code'. |
514 | | // |
515 | | // We track the number of successors of a node which have been visited, and |
516 | | // visit a node once we have visited all of its successors. Only edges where |
517 | | // the variable might still be uninitialized are followed. Since a variable |
518 | | // can't transfer from being initialized to being uninitialized, this will |
519 | | // trace out the subgraph which inevitably leads to the use and does not |
520 | | // initialize the variable. We do not want to skip past loops, since their |
521 | | // non-termination might be correlated with the initialization condition. |
522 | | // |
523 | | // For example: |
524 | | // |
525 | | // void f(bool a, bool b) { |
526 | | // block1: int n; |
527 | | // if (a) { |
528 | | // block2: if (b) |
529 | | // block3: n = 1; |
530 | | // block4: } else if (b) { |
531 | | // block5: while (!a) { |
532 | | // block6: do_work(&a); |
533 | | // n = 2; |
534 | | // } |
535 | | // } |
536 | | // block7: if (a) |
537 | | // block8: g(); |
538 | | // block9: return n; |
539 | | // } |
540 | | // |
541 | | // Starting from the maybe-uninitialized use in block 9: |
542 | | // * Block 7 is not visited because we have only visited one of its two |
543 | | // successors. |
544 | | // * Block 8 is visited because we've visited its only successor. |
545 | | // From block 8: |
546 | | // * Block 7 is visited because we've now visited both of its successors. |
547 | | // From block 7: |
548 | | // * Blocks 1, 2, 4, 5, and 6 are not visited because we didn't visit all |
549 | | // of their successors (we didn't visit 4, 3, 5, 6, and 5, respectively). |
550 | | // * Block 3 is not visited because it initializes 'n'. |
551 | | // Now the algorithm terminates, having visited blocks 7 and 8, and having |
552 | | // found the frontier is blocks 2, 4, and 5. |
553 | | // |
554 | | // 'n' is definitely uninitialized for two edges into block 7 (from blocks 2 |
555 | | // and 4), so we report that any time either of those edges is taken (in |
556 | | // each case when 'b == false'), 'n' is used uninitialized. |
557 | 262 | SmallVector<const CFGBlock*, 32> Queue; |
558 | 262 | SmallVector<unsigned, 32> SuccsVisited(cfg.getNumBlockIDs(), 0); |
559 | 262 | Queue.push_back(block); |
560 | | // Specify that we've already visited all successors of the starting block. |
561 | | // This has the dual purpose of ensuring we never add it to the queue, and |
562 | | // of marking it as not being a candidate element of the frontier. |
563 | 262 | SuccsVisited[block->getBlockID()] = block->succ_size(); |
564 | 1.66k | while (!Queue.empty()) { |
565 | 1.40k | const CFGBlock *B = Queue.pop_back_val(); |
566 | | |
567 | | // If the use is always reached from the entry block, make a note of that. |
568 | 1.40k | if (B == &cfg.getEntry()) |
569 | 48 | Use.setUninitAfterCall(); |
570 | | |
571 | 1.40k | for (CFGBlock::const_pred_iterator I = B->pred_begin(), E = B->pred_end(); |
572 | 3.35k | I != E; ++I1.95k ) { |
573 | 1.95k | const CFGBlock *Pred = *I; |
574 | 1.95k | if (!Pred) |
575 | 4 | continue; |
576 | | |
577 | 1.94k | Value AtPredExit = vals.getValue(Pred, B, vd); |
578 | 1.94k | if (AtPredExit == Initialized) |
579 | | // This block initializes the variable. |
580 | 222 | continue; |
581 | 1.72k | if (AtPredExit == MayUninitialized && |
582 | 1.72k | vals.getValue(B, nullptr, vd) == Uninitialized481 ) { |
583 | | // This block declares the variable (uninitialized), and is reachable |
584 | | // from a block that initializes the variable. We can't guarantee to |
585 | | // give an earlier location for the diagnostic (and it appears that |
586 | | // this code is intended to be reachable) so give a diagnostic here |
587 | | // and go no further down this path. |
588 | 19 | Use.setUninitAfterDecl(); |
589 | 19 | continue; |
590 | 19 | } |
591 | | |
592 | 1.70k | if (AtPredExit == MayUninitialized) { |
593 | | // If the predecessor's terminator is an "asm goto" that initializes |
594 | | // the variable, then don't count it as "initialized" on the indirect |
595 | | // paths. |
596 | 462 | CFGTerminator term = Pred->getTerminator(); |
597 | 462 | if (const auto *as = dyn_cast_or_null<GCCAsmStmt>(term.getStmt())) { |
598 | 32 | const CFGBlock *fallthrough = *Pred->succ_begin(); |
599 | 32 | if (as->isAsmGoto() && |
600 | 42 | llvm::any_of(as->outputs(), [&](const Expr *output) 32 { |
601 | 42 | return vd == findVar(output).getDecl() && |
602 | 42 | llvm::any_of(as->labels(), |
603 | 44 | [&](const AddrLabelExpr *label) { |
604 | 44 | return label->getLabel()->getStmt() == B->Label && |
605 | 44 | B != fallthrough18 ; |
606 | 44 | }); |
607 | 42 | })) { |
608 | 16 | Use.setUninitAfterDecl(); |
609 | 16 | continue; |
610 | 16 | } |
611 | 32 | } |
612 | 462 | } |
613 | | |
614 | 1.69k | unsigned &SV = SuccsVisited[Pred->getBlockID()]; |
615 | 1.69k | if (!SV) { |
616 | | // When visiting the first successor of a block, mark all NULL |
617 | | // successors as having been visited. |
618 | 1.34k | for (CFGBlock::const_succ_iterator SI = Pred->succ_begin(), |
619 | 1.34k | SE = Pred->succ_end(); |
620 | 3.30k | SI != SE; ++SI1.96k ) |
621 | 1.96k | if (!*SI) |
622 | 44 | ++SV; |
623 | 1.34k | } |
624 | | |
625 | 1.69k | if (++SV == Pred->succ_size()) |
626 | | // All paths from this block lead to the use and don't initialize the |
627 | | // variable. |
628 | 1.14k | Queue.push_back(Pred); |
629 | 1.69k | } |
630 | 1.40k | } |
631 | | |
632 | | // Scan the frontier, looking for blocks where the variable was |
633 | | // uninitialized. |
634 | 3.79k | for (const auto *Block : cfg) { |
635 | 3.79k | unsigned BlockID = Block->getBlockID(); |
636 | 3.79k | const Stmt *Term = Block->getTerminatorStmt(); |
637 | 3.79k | if (SuccsVisited[BlockID] && SuccsVisited[BlockID] < Block->succ_size()1.60k && |
638 | 3.79k | Term199 ) { |
639 | | // This block inevitably leads to the use. If we have an edge from here |
640 | | // to a post-dominator block, and the variable is uninitialized on that |
641 | | // edge, we have found a bug. |
642 | 199 | for (CFGBlock::const_succ_iterator I = Block->succ_begin(), |
643 | 629 | E = Block->succ_end(); I != E; ++I430 ) { |
644 | 430 | const CFGBlock *Succ = *I; |
645 | 430 | if (Succ && SuccsVisited[Succ->getBlockID()] >= Succ->succ_size() && |
646 | 430 | vals.getValue(Block, Succ, vd) == Uninitialized215 ) { |
647 | | // Switch cases are a special case: report the label to the caller |
648 | | // as the 'terminator', not the switch statement itself. Suppress |
649 | | // situations where no label matched: we can't be sure that's |
650 | | // possible. |
651 | 136 | if (isa<SwitchStmt>(Term)) { |
652 | 24 | const Stmt *Label = Succ->getLabel(); |
653 | 24 | if (!Label || !isa<SwitchCase>(Label)8 ) |
654 | | // Might not be possible. |
655 | 16 | continue; |
656 | 8 | UninitUse::Branch Branch; |
657 | 8 | Branch.Terminator = Label; |
658 | 8 | Branch.Output = 0; // Ignored. |
659 | 8 | Use.addUninitBranch(Branch); |
660 | 112 | } else { |
661 | 112 | UninitUse::Branch Branch; |
662 | 112 | Branch.Terminator = Term; |
663 | 112 | Branch.Output = I - Block->succ_begin(); |
664 | 112 | Use.addUninitBranch(Branch); |
665 | 112 | } |
666 | 136 | } |
667 | 430 | } |
668 | 199 | } |
669 | 3.79k | } |
670 | | |
671 | 262 | return Use; |
672 | 4.65k | } |
673 | | }; |
674 | | |
675 | | } // namespace |
676 | | |
677 | 6.04k | void TransferFunctions::reportUse(const Expr *ex, const VarDecl *vd) { |
678 | 6.04k | Value v = vals[vd]; |
679 | 6.04k | if (isUninitialized(v)) |
680 | 4.62k | handler.handleUseOfUninitVariable(vd, getUninitUse(ex, vd, v)); |
681 | 6.04k | } |
682 | | |
683 | 44 | void TransferFunctions::reportConstRefUse(const Expr *ex, const VarDecl *vd) { |
684 | 44 | Value v = vals[vd]; |
685 | 44 | if (isAlwaysUninit(v)) |
686 | 31 | handler.handleConstRefUseOfUninitVariable(vd, getUninitUse(ex, vd, v)); |
687 | 44 | } |
688 | | |
689 | 4 | void TransferFunctions::VisitObjCForCollectionStmt(ObjCForCollectionStmt *FS) { |
690 | | // This represents an initialization of the 'element' value. |
691 | 4 | if (const auto *DS = dyn_cast<DeclStmt>(FS->getElement())) { |
692 | 2 | const auto *VD = cast<VarDecl>(DS->getSingleDecl()); |
693 | 2 | if (isTrackedVar(VD)) |
694 | 2 | vals[VD] = Initialized; |
695 | 2 | } |
696 | 4 | } |
697 | | |
698 | | void TransferFunctions::VisitOMPExecutableDirective( |
699 | 2.39k | OMPExecutableDirective *ED) { |
700 | 2.39k | for (Stmt *S : OMPExecutableDirective::used_clauses_children(ED->clauses())) { |
701 | 2.32k | assert(S && "Expected non-null used-in-clause child."); |
702 | 0 | Visit(S); |
703 | 2.32k | } |
704 | 2.39k | if (!ED->isStandaloneDirective()) |
705 | 2.32k | Visit(ED->getStructuredBlock()); |
706 | 2.39k | } |
707 | | |
708 | 18 | void TransferFunctions::VisitBlockExpr(BlockExpr *be) { |
709 | 18 | const BlockDecl *bd = be->getBlockDecl(); |
710 | 20 | for (const auto &I : bd->captures()) { |
711 | 20 | const VarDecl *vd = I.getVariable(); |
712 | 20 | if (!isTrackedVar(vd)) |
713 | 2 | continue; |
714 | 18 | if (I.isByRef()) { |
715 | 8 | vals[vd] = Initialized; |
716 | 8 | continue; |
717 | 8 | } |
718 | 10 | reportUse(be, vd); |
719 | 10 | } |
720 | 18 | } |
721 | | |
722 | 2.28k | void TransferFunctions::VisitCallExpr(CallExpr *ce) { |
723 | 2.28k | if (Decl *Callee = ce->getCalleeDecl()) { |
724 | 2.28k | if (Callee->hasAttr<ReturnsTwiceAttr>()) { |
725 | | // After a call to a function like setjmp or vfork, any variable which is |
726 | | // initialized anywhere within this function may now be initialized. For |
727 | | // now, just assume such a call initializes all variables. FIXME: Only |
728 | | // mark variables as initialized if they have an initializer which is |
729 | | // reachable from here. |
730 | 2 | vals.setAllScratchValues(Initialized); |
731 | 2 | } |
732 | 2.28k | else if (Callee->hasAttr<AnalyzerNoReturnAttr>()) { |
733 | | // Functions labeled like "analyzer_noreturn" are often used to denote |
734 | | // "panic" functions that in special debug situations can still return, |
735 | | // but for the most part should not be treated as returning. This is a |
736 | | // useful annotation borrowed from the static analyzer that is useful for |
737 | | // suppressing branch-specific false positives when we call one of these |
738 | | // functions but keep pretending the path continues (when in reality the |
739 | | // user doesn't care). |
740 | 4 | vals.setAllScratchValues(Unknown); |
741 | 4 | } |
742 | 2.28k | } |
743 | 2.28k | } |
744 | | |
    |      | // Transfer function for DeclRefExpr: dispatch on the classification that
    |      | // was precomputed by ClassifyRefs. A "Use" reports a possibly-uninit read,
    |      | // an "Init" marks the variable initialized, "SelfInit" flags the
    |      | // 'int x = x' idiom, and "ConstRefUse" reports passing an uninitialized
    |      | // variable by const reference.
  745 | 15.3k | void TransferFunctions::VisitDeclRefExpr(DeclRefExpr *dr) {
  746 | 15.3k |   switch (classification.get(dr)) {
  747 | 8.67k |   case ClassifyRefs::Ignore:
  748 | 8.67k |     break;
  749 | 6.03k |   case ClassifyRefs::Use:
  750 | 6.03k |     reportUse(dr, cast<VarDecl>(dr->getDecl()));
  751 | 6.03k |     break;
  752 |   557 |   case ClassifyRefs::Init:
  753 |   557 |     vals[cast<VarDecl>(dr->getDecl())] = Initialized;
  754 |   557 |     break;
  755 |    19 |   case ClassifyRefs::SelfInit:
  756 |    19 |       handler.handleSelfInit(cast<VarDecl>(dr->getDecl()));
  757 |    19 |       break;
  758 |    44 |   case ClassifyRefs::ConstRefUse:
  759 |    44 |       reportConstRefUse(dr, cast<VarDecl>(dr->getDecl()));
  760 |    44 |       break;
  761 | 15.3k |   }
  762 | 15.3k | }
763 | | |
    |      | // Transfer function for BinaryOperator: only plain assignment (BO_Assign)
    |      | // is handled here — a tracked variable on the LHS becomes Initialized.
    |      | // Reads of the RHS are handled separately via VisitDeclRefExpr.
  764 | 4.75k | void TransferFunctions::VisitBinaryOperator(BinaryOperator *BO) {
  765 | 4.75k |   if (BO->getOpcode() == BO_Assign) {
  766 | 1.16k |     FindVarResult Var = findVar(BO->getLHS());
  767 | 1.16k |     if (const VarDecl *VD = Var.getDecl())
  768 |   496 |       vals[VD] = Initialized;
  769 | 1.16k |   }
  770 | 4.75k | }
771 | | |
    |      | // Transfer function for DeclStmt: set the dataflow value for each tracked
    |      | // variable being declared — Uninitialized for the 'int x = x' idiom and for
    |      | // declarations without an initializer, Initialized otherwise.
  772 | 4.26k | void TransferFunctions::VisitDeclStmt(DeclStmt *DS) {
  773 | 4.26k |   for (auto *DI : DS->decls()) {
  774 | 4.26k |     auto *VD = dyn_cast<VarDecl>(DI);
  775 | 4.26k |     if (VD && isTrackedVar(VD)) {
  776 | 2.45k |       if (getSelfInitExpr(VD)) {
  777 |      |         // If the initializer consists solely of a reference to itself, we
  778 |      |         // explicitly mark the variable as uninitialized. This allows code
  779 |      |         // like the following:
  780 |      |         //
  781 |      |         //   int x = x;
  782 |      |         //
  783 |      |         // to deliberately leave a variable uninitialized. Different analysis
  784 |      |         // clients can detect this pattern and adjust their reporting
  785 |      |         // appropriately, but we need to continue to analyze subsequent uses
  786 |      |         // of the variable.
  787 |    19 |         vals[VD] = Uninitialized;
  788 | 2.43k |       } else if (VD->getInit()) {
  789 |      |         // Treat the new variable as initialized.
  790 |   861 |         vals[VD] = Initialized;
  791 | 1.57k |       } else {
  792 |      |         // No initializer: the variable is now uninitialized. This matters
  793 |      |         // for cases like:
  794 |      |         //   while (...) {
  795 |      |         //     int n;
  796 |      |         //     use(n);
  797 |      |         //     n = 0;
  798 |      |         //   }
  799 |      |         // FIXME: Mark the variable as uninitialized whenever its scope is
  800 |      |         //  left, since its scope could be re-entered by a jump over the
  801 |      |         //  declaration.
  802 | 1.57k |         vals[VD] = Uninitialized;
  803 | 1.57k |       }
  804 | 2.45k |     }
  805 | 4.26k |   }
  806 | 4.26k | }
807 | | |
    |      | // Transfer function for GCC inline assembly. Only the "asm goto" form is
    |      | // relevant: it is a CFG terminator whose outputs may or may not be written
    |      | // depending on which label is taken, so each output variable is demoted to
    |      | // MayUninitialized rather than Initialized.
  808 |    17 | void TransferFunctions::VisitGCCAsmStmt(GCCAsmStmt *as) {
  809 |      |   // An "asm goto" statement is a terminator that may initialize some variables.
  810 |    17 |   if (!as->isAsmGoto())
  811 |     8 |     return;
  812 |      | 
  813 |     9 |   ASTContext &C = ac.getASTContext();
  814 |    12 |   for (const Expr *O : as->outputs()) {
  815 |    12 |     const Expr *Ex = stripCasts(C, O);
  816 |      | 
  817 |      |     // Strip away any unary operators. Invalid l-values are reported by other
  818 |      |     // semantic analysis passes.
  819 |    14 |     while (const auto *UO = dyn_cast<UnaryOperator>(Ex))
  820 |     2 |       Ex = stripCasts(C, UO->getSubExpr());
  821 |      | 
  822 |      |     // Mark the variable as potentially uninitialized for those cases where
  823 |      |     // it's used on an indirect path, where it's not guaranteed to be
  824 |      |     // defined.
  825 |    12 |     if (const VarDecl *VD = findVar(Ex).getDecl())
  826 |     9 |       vals[VD] = MayUninitialized;
  827 |    12 |   }
  828 |     9 | }
829 | | |
    |      | // Transfer function for Objective-C message sends that are implicitly
    |      | // no-return (e.g. exception-raising selectors) but not modeled as such in
    |      | // the CFG: conservatively set every tracked value to Unknown.
  830 |     3 | void TransferFunctions::VisitObjCMessageExpr(ObjCMessageExpr *ME) {
  831 |      |   // If the Objective-C message expression is an implicit no-return that
  832 |      |   // is not modeled in the CFG, set the tracked dataflow values to Unknown.
  833 |     3 |   if (objCNoRet.isImplicitNoReturn(ME)) {
  834 |     3 |     vals.setAllScratchValues(Unknown);
  835 |     3 |   }
  836 |     3 | }
837 | | |
838 | | //------------------------------------------------------------------------====// |
839 | | // High-level "driver" logic for uninitialized values analysis. |
840 | | //====------------------------------------------------------------------------// |
841 | | |
    |      | // Run the transfer functions over a single CFG block: merge the value
    |      | // vectors of all already-analyzed predecessors into the scratch vector,
    |      | // visit each statement in the block (plus an "asm goto" terminator, which
    |      | // is not part of the block's statement list), and commit the scratch
    |      | // vector. Returns true if the block's value vector changed, so the caller
    |      | // knows whether to re-enqueue successors.
  842 |      | static bool runOnBlock(const CFGBlock *block, const CFG &cfg,
  843 |      |                        AnalysisDeclContext &ac, CFGBlockValues &vals,
  844 |      |                        const ClassifyRefs &classification,
  845 |      |                        llvm::BitVector &wasAnalyzed,
  846 | 10.9k |                        UninitVariablesHandler &handler) {
  847 | 10.9k |   wasAnalyzed[block->getBlockID()] = true;
  848 | 10.9k |   vals.resetScratch();
  849 |      |   // Merge in values of predecessor blocks.
  850 | 10.9k |   bool isFirst = true;
  851 | 10.9k |   for (CFGBlock::const_pred_iterator I = block->pred_begin(),
  852 | 25.4k |        E = block->pred_end(); I != E; ++I14.4k ) {
  853 | 14.4k |     const CFGBlock *pred = *I;
  854 | 14.4k |     if (!pred)
  855 |    31 |       continue;
  856 | 14.4k |     if (wasAnalyzed[pred->getBlockID()]) {
  857 | 13.3k |       vals.mergeIntoScratch(vals.getValueVector(pred), isFirst);
  858 | 13.3k |       isFirst = false;
  859 | 13.3k |     }
  860 | 14.4k |   }
  861 |      |   // Apply the transfer function.
  862 | 10.9k |   TransferFunctions tf(vals, cfg, block, ac, classification, handler);
  863 | 40.9k |   for (const auto &I : *block) {
  864 | 40.9k |     if (Optional<CFGStmt> cs = I.getAs<CFGStmt>())
  865 | 40.8k |       tf.Visit(const_cast<Stmt *>(cs->getStmt()));
  866 | 40.9k |   }
  867 | 10.9k |   CFGTerminator terminator = block->getTerminator();
  868 | 10.9k |   if (auto *as = dyn_cast_or_null<GCCAsmStmt>(terminator.getStmt()))
  869 |     9 |     if (as->isAsmGoto())
  870 |     9 |       tf.Visit(as);
  871 | 10.9k |   return vals.updateValueVectorWithScratch(block);
  872 | 10.9k | }
873 | | |
  874 |      | namespace {
  875 |      | 
  876 |      | /// PruneBlocksHandler is a special UninitVariablesHandler that is used
  877 |      | /// to detect when a CFGBlock has any *potential* use of an uninitialized
  878 |      | /// variable.  It is mainly used to prune out work during the final
  879 |      | /// reporting pass.
    |      | /// It records, per block, whether any of the three handler callbacks fired
    |      | /// while that block was being analyzed; only flagged blocks are re-visited
    |      | /// with the real reporting handler afterwards.
  880 |      | struct PruneBlocksHandler : public UninitVariablesHandler {
  881 |      |   /// Records if a CFGBlock had a potential use of an uninitialized variable.
  882 |      |   llvm::BitVector hadUse;
  883 |      | 
  884 |      |   /// Records if any CFGBlock had a potential use of an uninitialized variable.
  885 |      |   bool hadAnyUse = false;
  886 |      | 
  887 |      |   /// The current block to scribble use information.
  888 |      |   unsigned currentBlock = 0;
  889 |      | 
  890 | 1.37k |   PruneBlocksHandler(unsigned numBlocks) : hadUse(numBlocks, false) {}
  891 |      | 
  892 | 1.37k |   ~PruneBlocksHandler() override = default;
  893 |      | 
  894 |      |   void handleUseOfUninitVariable(const VarDecl *vd,
  895 | 2.33k |                                  const UninitUse &use) override {
  896 | 2.33k |     hadUse[currentBlock] = true;
  897 | 2.33k |     hadAnyUse = true;
  898 | 2.33k |   }
  899 |      | 
  900 |      |   void handleConstRefUseOfUninitVariable(const VarDecl *vd,
  901 |    16 |                                          const UninitUse &use) override {
  902 |    16 |     hadUse[currentBlock] = true;
  903 |    16 |     hadAnyUse = true;
  904 |    16 |   }
  905 |      | 
  906 |      |   /// Called when the uninitialized variable analysis detects the
  907 |      |   /// idiom 'int x = x'.  All other uses of 'x' within the initializer
  908 |      |   /// are handled by handleUseOfUninitVariable.
  909 |    10 |   void handleSelfInit(const VarDecl *vd) override {
  910 |    10 |     hadUse[currentBlock] = true;
  911 |    10 |     hadAnyUse = true;
  912 |    10 |   }
  913 |      | };
  914 |      | 
  915 |      | } // namespace
916 | | |
    |      | // Entry point for the analysis. Two passes over the CFG: a fixed-point
    |      | // worklist pass using PruneBlocksHandler to mark which blocks can touch an
    |      | // uninitialized variable, then a single reporting pass over only those
    |      | // blocks with the client-supplied handler.
  917 |      | void clang::runUninitializedVariablesAnalysis(
  918 |      |     const DeclContext &dc,
  919 |      |     const CFG &cfg,
  920 |      |     AnalysisDeclContext &ac,
  921 |      |     UninitVariablesHandler &handler,
  922 | 58.3k |     UninitVariablesAnalysisStats &stats) {
  923 | 58.3k |   CFGBlockValues vals(cfg);
  924 | 58.3k |   vals.computeSetOfDeclarations(dc);
    |      |   // Fast path: nothing to track, so skip the dataflow machinery entirely.
  925 | 58.3k |   if (vals.hasNoDeclarations())
  926 | 57.0k |     return;
  927 |      | 
  928 | 1.37k |   stats.NumVariablesAnalyzed = vals.getNumEntries();
  929 |      | 
  930 |      |   // Precompute which expressions are uses and which are initializations.
  931 | 1.37k |   ClassifyRefs classification(ac);
  932 | 1.37k |   cfg.VisitBlockStmts(classification);
  933 |      | 
  934 |      |   // Mark all variables uninitialized at the entry.
  935 | 1.37k |   const CFGBlock &entry = cfg.getEntry();
  936 | 1.37k |   ValueVector &vec = vals.getValueVector(&entry);
  937 | 1.37k |   const unsigned n = vals.getNumEntries();
  938 | 3.30k |   for (unsigned j = 0; j < n; ++j1.93k ) {
  939 | 1.93k |     vec[j] = Uninitialized;
  940 | 1.93k |   }
  941 |      | 
  942 |      |   // Proceed with the worklist.
  943 | 1.37k |   ForwardDataflowWorklist worklist(cfg, ac);
  944 | 1.37k |   llvm::BitVector previouslyVisited(cfg.getNumBlockIDs());
  945 | 1.37k |   worklist.enqueueSuccessors(&cfg.getEntry());
  946 | 1.37k |   llvm::BitVector wasAnalyzed(cfg.getNumBlockIDs(), false);
  947 | 1.37k |   wasAnalyzed[cfg.getEntry().getBlockID()] = true;
  948 | 1.37k |   PruneBlocksHandler PBH(cfg.getNumBlockIDs());
  949 |      | 
  950 | 11.1k |   while (const CFGBlock *block = worklist.dequeue()) {
  951 | 9.79k |     PBH.currentBlock = block->getBlockID();
  952 |      | 
  953 |      |     // Did the block change?
  954 | 9.79k |     bool changed = runOnBlock(block, cfg, ac, vals,
  955 | 9.79k |                               classification, wasAnalyzed, PBH);
  956 | 9.79k |     ++stats.NumBlockVisits;
    |      |     // Re-enqueue successors until a fixed point: a block that changed, or
    |      |     // that has never been visited, can still affect its successors.
  957 | 9.79k |     if (changed || !previouslyVisited[block->getBlockID()]1.24k )
  958 | 8.55k |       worklist.enqueueSuccessors(block);
  959 | 9.79k |     previouslyVisited[block->getBlockID()] = true;
  960 | 9.79k |   }
  961 |      | 
    |      |   // No block ever flagged a potential use, so there is nothing to report.
  962 | 1.37k |   if (!PBH.hadAnyUse)
  963 |   273 |     return;
  964 |      | 
  965 |      |   // Run through the blocks one more time, and report uninitialized variables.
  966 | 1.09k |   for (const auto *block : cfg)
  967 | 6.36k |     if (PBH.hadUse[block->getBlockID()]) {
  968 | 1.19k |       runOnBlock(block, cfg, ac, vals, classification, wasAnalyzed, handler);
  969 | 1.19k |       ++stats.NumBlockVisits;
  970 | 1.19k |     }
  971 | 1.09k | }
972 | | |
    |      | // Out-of-line defaulted destructor for the handler interface.
  973 | 59.7k | UninitVariablesHandler::~UninitVariablesHandler() = default;