Coverage Report

Created: 2020-09-19 12:23

/Users/buildslave/jenkins/workspace/coverage/llvm-project/clang/lib/Analysis/UninitializedValues.cpp
Line| Count|Source
   1|      |//===- UninitializedValues.cpp - Find Uninitialized Values ----------------===//
   2|      |//
   3|      |// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
   4|      |// See https://llvm.org/LICENSE.txt for license information.
   5|      |// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
   6|      |//
   7|      |//===----------------------------------------------------------------------===//
   8|      |//
   9|      |// This file implements uninitialized values analysis for source-level CFGs.
  10|      |//
  11|      |//===----------------------------------------------------------------------===//
  12|      |
  13|      |#include "clang/Analysis/Analyses/UninitializedValues.h"
  14|      |#include "clang/AST/Attr.h"
  15|      |#include "clang/AST/Decl.h"
  16|      |#include "clang/AST/DeclBase.h"
  17|      |#include "clang/AST/Expr.h"
  18|      |#include "clang/AST/OperationKinds.h"
  19|      |#include "clang/AST/Stmt.h"
  20|      |#include "clang/AST/StmtObjC.h"
  21|      |#include "clang/AST/StmtVisitor.h"
  22|      |#include "clang/AST/Type.h"
  23|      |#include "clang/Analysis/Analyses/PostOrderCFGView.h"
  24|      |#include "clang/Analysis/AnalysisDeclContext.h"
  25|      |#include "clang/Analysis/CFG.h"
  26|      |#include "clang/Analysis/DomainSpecific/ObjCNoReturn.h"
  27|      |#include "clang/Analysis/FlowSensitive/DataflowWorklist.h"
  28|      |#include "clang/Basic/LLVM.h"
  29|      |#include "llvm/ADT/BitVector.h"
  30|      |#include "llvm/ADT/DenseMap.h"
  31|      |#include "llvm/ADT/None.h"
  32|      |#include "llvm/ADT/Optional.h"
  33|      |#include "llvm/ADT/PackedVector.h"
  34|      |#include "llvm/ADT/SmallBitVector.h"
  35|      |#include "llvm/ADT/SmallVector.h"
  36|      |#include "llvm/Support/Casting.h"
  37|      |#include <algorithm>
  38|      |#include <cassert>
  39|      |
  40|      |using namespace clang;
  41|      |
  42|      |#define DEBUG_LOGGING 0
  43|      |
  44|  111k|static bool isTrackedVar(const VarDecl *vd, const DeclContext *dc) {
  45|  111k|  if (vd->isLocalVarDecl() && !vd->hasGlobalStorage() &&
  46| 21.6k|      !vd->isExceptionVariable() && !vd->isInitCapture() &&
  47| 21.6k|      !vd->isImplicit() && vd->getDeclContext() == dc) {
  48| 12.4k|    QualType ty = vd->getType();
  49| 12.4k|    return ty->isScalarType() || ty->isVectorType() || ty->isRecordType();
  50| 12.4k|  }
  51| 98.8k|  return false;
  52| 98.8k|}
  53|      |
  54|      |//------------------------------------------------------------------------====//
  55|      |// DeclToIndex: a mapping from Decls we track to value indices.
  56|      |//====------------------------------------------------------------------------//
  57|      |
  58|      |namespace {
  59|      |
  60|      |class DeclToIndex {
  61|      |  llvm::DenseMap<const VarDecl *, unsigned> map;
  62|      |
  63|      |public:
  64| 40.9k|  DeclToIndex() = default;
  65|      |
  66|      |  /// Compute the actual mapping from declarations to bits.
  67|      |  void computeMap(const DeclContext &dc);
  68|      |
  69|      |  /// Return the number of declarations in the map.
  70| 84.5k|  unsigned size() const { return map.size(); }
  71|      |
  72|      |  /// Returns the bit vector index for a given declaration.
  73|      |  Optional<unsigned> getValueIndex(const VarDecl *d) const;
  74|      |};
  75|      |
  76|      |} // namespace
  77|      |
  78| 40.9k|void DeclToIndex::computeMap(const DeclContext &dc) {
  79| 40.9k|  unsigned count = 0;
  80| 40.9k|  DeclContext::specific_decl_iterator<VarDecl> I(dc.decls_begin()),
  81| 40.9k|                                               E(dc.decls_end());
  82|  120k|  for ( ; I != E; ++I) {
  83| 79.2k|    const VarDecl *vd = *I;
  84| 79.2k|    if (isTrackedVar(vd, &dc))
  85| 2.06k|      map[vd] = count++;
  86| 79.2k|  }
  87| 40.9k|}
  88|      |
  89| 12.3k|Optional<unsigned> DeclToIndex::getValueIndex(const VarDecl *d) const {
  90| 12.3k|  llvm::DenseMap<const VarDecl *, unsigned>::const_iterator I = map.find(d);
  91| 12.3k|  if (I == map.end())
  92|     0|    return None;
  93| 12.3k|  return I->second;
  94| 12.3k|}
  95|      |
  96|      |//------------------------------------------------------------------------====//
  97|      |// CFGBlockValues: dataflow values for CFG blocks.
  98|      |//====------------------------------------------------------------------------//
  99|      |
 100|      |// These values are defined in such a way that a merge can be done using
 101|      |// a bitwise OR.
 102|      |enum Value { Unknown = 0x0,         /* 00 */
 103|      |             Initialized = 0x1,     /* 01 */
 104|      |             Uninitialized = 0x2,   /* 10 */
 105|      |             MayUninitialized = 0x3 /* 11 */ };
 106|      |
 107| 10.1k|static bool isUninitialized(const Value v) {
 108| 10.1k|  return v >= Uninitialized;
 109| 10.1k|}
 110|      |
 111| 4.31k|static bool isAlwaysUninit(const Value v) {
 112| 4.31k|  return v == Uninitialized;
 113| 4.31k|}
 114|      |
 115|      |namespace {
 116|      |
 117|      |using ValueVector = llvm::PackedVector<Value, 2, llvm::SmallBitVector>;
 118|      |
 119|      |class CFGBlockValues {
 120|      |  const CFG &cfg;
 121|      |  SmallVector<ValueVector, 8> vals;
 122|      |  ValueVector scratch;
 123|      |  DeclToIndex declToIndex;
 124|      |
 125|      |public:
 126|      |  CFGBlockValues(const CFG &cfg);
 127|      |
 128| 2.62k|  unsigned getNumEntries() const { return declToIndex.size(); }
 129|      |
 130|      |  void computeSetOfDeclarations(const DeclContext &dc);
 131|      |
 132| 27.4k|  ValueVector &getValueVector(const CFGBlock *block) {
 133| 27.4k|    return vals[block->getBlockID()];
 134| 27.4k|  }
 135|      |
 136|      |  void setAllScratchValues(Value V);
 137|      |  void mergeIntoScratch(ValueVector const &source, bool isFirst);
 138|      |  bool updateValueVectorWithScratch(const CFGBlock *block);
 139|      |
 140| 40.9k|  bool hasNoDeclarations() const {
 141| 40.9k|    return declToIndex.size() == 0;
 142| 40.9k|  }
 143|      |
 144|      |  void resetScratch();
 145|      |
 146|      |  ValueVector::reference operator[](const VarDecl *vd);
 147|      |
 148|      |  Value getValue(const CFGBlock *block, const CFGBlock *dstBlock,
 149| 2.61k|                 const VarDecl *vd) {
 150| 2.61k|    const Optional<unsigned> &idx = declToIndex.getValueIndex(vd);
 151| 2.61k|    assert(idx.hasValue());
 152| 2.61k|    return getValueVector(block)[idx.getValue()];
 153| 2.61k|  }
 154|      |};
 155|      |
 156|      |} // namespace
 157|      |
 158| 40.9k|CFGBlockValues::CFGBlockValues(const CFG &c) : cfg(c), vals(0) {}
 159|      |
 160| 40.9k|void CFGBlockValues::computeSetOfDeclarations(const DeclContext &dc) {
 161| 40.9k|  declToIndex.computeMap(dc);
 162| 40.9k|  unsigned decls = declToIndex.size();
 163| 40.9k|  scratch.resize(decls);
 164| 40.9k|  unsigned n = cfg.getNumBlockIDs();
 165| 40.9k|  if (!n)
 166|     0|    return;
 167| 40.9k|  vals.resize(n);
 168| 40.9k|  for (auto &val : vals)
 169|  129k|    val.resize(decls);
 170| 40.9k|}
 171|      |
 172|      |#if DEBUG_LOGGING
 173|      |static void printVector(const CFGBlock *block, ValueVector &bv,
 174|      |                        unsigned num) {
 175|      |  llvm::errs() << block->getBlockID() << " :";
 176|      |  for (const auto &i : bv)
 177|      |    llvm::errs() << ' ' << i;
 178|      |  llvm::errs() << " : " << num << '\n';
 179|      |}
 180|      |#endif
 181|      |
 182|    27|void CFGBlockValues::setAllScratchValues(Value V) {
 183|    74|  for (unsigned I = 0, E = scratch.size(); I != E; ++I)
 184|    47|    scratch[I] = V;
 185|    27|}
 186|      |
 187|      |void CFGBlockValues::mergeIntoScratch(ValueVector const &source,
 188| 12.9k|                                      bool isFirst) {
 189| 12.9k|  if (isFirst)
 190| 10.6k|    scratch = source;
 191| 2.22k|  else
 192| 2.22k|    scratch |= source;
 193| 12.9k|}
 194|      |
 195| 10.6k|bool CFGBlockValues::updateValueVectorWithScratch(const CFGBlock *block) {
 196| 10.6k|  ValueVector &dst = getValueVector(block);
 197| 10.6k|  bool changed = (dst != scratch);
 198| 10.6k|  if (changed)
 199| 8.42k|    dst = scratch;
 200|      |#if DEBUG_LOGGING
 201|      |  printVector(block, scratch, 0);
 202|      |#endif
 203| 10.6k|  return changed;
 204| 10.6k|}
 205|      |
 206| 10.6k|void CFGBlockValues::resetScratch() {
 207| 10.6k|  scratch.reset();
 208| 10.6k|}
 209|      |
 210| 9.77k|ValueVector::reference CFGBlockValues::operator[](const VarDecl *vd) {
 211| 9.77k|  const Optional<unsigned> &idx = declToIndex.getValueIndex(vd);
 212| 9.77k|  assert(idx.hasValue());
 213| 9.77k|  return scratch[idx.getValue()];
 214| 9.77k|}
 215|      |
 216|      |//------------------------------------------------------------------------====//
 217|      |// Classification of DeclRefExprs as use or initialization.
 218|      |//====------------------------------------------------------------------------//
 219|      |
 220|      |namespace {
 221|      |
 222|      |class FindVarResult {
 223|      |  const VarDecl *vd;
 224|      |  const DeclRefExpr *dr;
 225|      |
 226|      |public:
 227| 14.1k|  FindVarResult(const VarDecl *vd, const DeclRefExpr *dr) : vd(vd), dr(dr) {}
 228|      |
 229| 11.5k|  const DeclRefExpr *getDeclRefExpr() const { return dr; }
 230| 2.58k|  const VarDecl *getDecl() const { return vd; }
 231|      |};
 232|      |
 233|      |} // namespace
 234|      |
 235| 15.6k|static const Expr *stripCasts(ASTContext &C, const Expr *Ex) {
 236| 15.6k|  while (Ex) {
 237| 15.6k|    Ex = Ex->IgnoreParenNoopCasts(C);
 238| 15.6k|    if (const auto *CE = dyn_cast<CastExpr>(Ex)) {
 239|   748|      if (CE->getCastKind() == CK_LValueBitCast) {
 240|     2|        Ex = CE->getSubExpr();
 241|     2|        continue;
 242|     2|      }
 243| 15.6k|    }
 244| 15.6k|    break;
 245| 15.6k|  }
 246| 15.6k|  return Ex;
 247| 15.6k|}
 248|      |
 249|      |/// If E is an expression comprising a reference to a single variable, find that
 250|      |/// variable.
 251| 14.1k|static FindVarResult findVar(const Expr *E, const DeclContext *DC) {
 252| 14.1k|  if (const auto *DRE =
 253| 13.6k|          dyn_cast<DeclRefExpr>(stripCasts(DC->getParentASTContext(), E)))
 254| 13.6k|    if (const auto *VD = dyn_cast<VarDecl>(DRE->getDecl()))
 255| 13.6k|      if (isTrackedVar(VD, DC))
 256| 4.62k|        return FindVarResult(VD, DRE);
 257| 9.52k|  return FindVarResult(nullptr, nullptr);
 258| 9.52k|}
 259|      |
 260|      |namespace {
 261|      |
 262|      |/// Classify each DeclRefExpr as an initialization or a use. Any
 263|      |/// DeclRefExpr which isn't explicitly classified will be assumed to have
 264|      |/// escaped the analysis and will be treated as an initialization.
 265|      |class ClassifyRefs : public StmtVisitor<ClassifyRefs> {
 266|      |public:
 267|      |  enum Class {
 268|      |    Init,
 269|      |    Use,
 270|      |    SelfInit,
 271|      |    ConstRefUse,
 272|      |    Ignore
 273|      |  };
 274|      |
 275|      |private:
 276|      |  const DeclContext *DC;
 277|      |  llvm::DenseMap<const DeclRefExpr *, Class> Classification;
 278|      |
 279| 14.1k|  bool isTrackedVar(const VarDecl *VD) const {
 280| 14.1k|    return ::isTrackedVar(VD, DC);
 281| 14.1k|  }
 282|      |
 283|      |  void classify(const Expr *E, Class C);
 284|      |
 285|      |public:
 286| 1.31k|  ClassifyRefs(AnalysisDeclContext &AC) : DC(cast<DeclContext>(AC.getDecl())) {}
 287|      |
 288|      |  void VisitDeclStmt(DeclStmt *DS);
 289|      |  void VisitUnaryOperator(UnaryOperator *UO);
 290|      |  void VisitBinaryOperator(BinaryOperator *BO);
 291|      |  void VisitCallExpr(CallExpr *CE);
 292|      |  void VisitCastExpr(CastExpr *CE);
 293|      |  void VisitOMPExecutableDirective(OMPExecutableDirective *ED);
 294|      |
 295| 44.1k|  void operator()(Stmt *S) { Visit(S); }
 296|      |
 297| 21.1k|  Class get(const DeclRefExpr *DRE) const {
 298| 21.1k|    llvm::DenseMap<const DeclRefExpr*, Class>::const_iterator I
 299| 21.1k|        = Classification.find(DRE);
 300| 21.1k|    if (I != Classification.end())
 301| 6.73k|      return I->second;
 302| 14.4k|
 303| 14.4k|    const auto *VD = dyn_cast<VarDecl>(DRE->getDecl());
 304| 14.4k|    if (!VD || !isTrackedVar(VD))
 305| 13.8k|      return Ignore;
 306|   603|
 307|   603|    return Init;
 308|   603|  }
 309|      |};
 310|      |
 311|      |} // namespace
 312|      |
 313| 4.58k|static const DeclRefExpr *getSelfInitExpr(VarDecl *VD) {
 314| 4.58k|  if (VD->getType()->isRecordType())
 315|   456|    return nullptr;
 316| 4.13k|  if (Expr *Init = VD->getInit()) {
 317| 1.18k|    const auto *DRE =
 318| 1.18k|        dyn_cast<DeclRefExpr>(stripCasts(VD->getASTContext(), Init));
 319| 1.18k|    if (DRE && DRE->getDecl() == VD)
 320|    28|      return DRE;
 321| 4.10k|  }
 322| 4.10k|  return nullptr;
 323| 4.10k|}
 324|      |
 325| 11.7k|void ClassifyRefs::classify(const Expr *E, Class C) {
 326|      |  // The result of a ?: could also be an lvalue.
 327| 11.7k|  E = E->IgnoreParens();
 328| 11.7k|  if (const auto *CO = dyn_cast<ConditionalOperator>(E)) {
 329|    14|    classify(CO->getTrueExpr(), C);
 330|    14|    classify(CO->getFalseExpr(), C);
 331|    14|    return;
 332|    14|  }
 333| 11.7k|
 334| 11.7k|  if (const auto *BCO = dyn_cast<BinaryConditionalOperator>(E)) {
 335|     4|    classify(BCO->getFalseExpr(), C);
 336|     4|    return;
 337|     4|  }
 338| 11.7k|
 339| 11.7k|  if (const auto *OVE = dyn_cast<OpaqueValueExpr>(E)) {
 340|     4|    classify(OVE->getSourceExpr(), C);
 341|     4|    return;
 342|     4|  }
 343| 11.7k|
 344| 11.7k|  if (const auto *ME = dyn_cast<MemberExpr>(E)) {
 345|   101|    if (const auto *VD = dyn_cast<VarDecl>(ME->getMemberDecl())) {
 346|     2|      if (!VD->isStaticDataMember())
 347|     0|        classify(ME->getBase(), C);
 348|     2|    }
 349|   101|    return;
 350|   101|  }
 351| 11.6k|
 352| 11.6k|  if (const auto *BO = dyn_cast<BinaryOperator>(E)) {
 353|    55|    switch (BO->getOpcode()) {
 354|     1|    case BO_PtrMemD:
 355|     1|    case BO_PtrMemI:
 356|     1|      classify(BO->getLHS(), C);
 357|     1|      return;
 358|    26|    case BO_Comma:
 359|    26|      classify(BO->getRHS(), C);
 360|    26|      return;
 361|    28|    default:
 362|    28|      return;
 363| 11.5k|    }
 364| 11.5k|  }
 365| 11.5k|
 366| 11.5k|  FindVarResult Var = findVar(E, DC);
 367| 11.5k|  if (const DeclRefExpr *DRE = Var.getDeclRefExpr())
 368| 3.96k|    Classification[DRE] = std::max(Classification[DRE], C);
 369| 11.5k|}
 370|      |
 371| 3.18k|void ClassifyRefs::VisitDeclStmt(DeclStmt *DS) {
 372| 3.18k|  for (auto *DI : DS->decls()) {
 373| 3.18k|    auto *VD = dyn_cast<VarDecl>(DI);
 374| 3.18k|    if (VD && isTrackedVar(VD))
 375| 2.04k|      if (const DeclRefExpr *DRE = getSelfInitExpr(VD))
 376|     9|        Classification[DRE] = SelfInit;
 377| 3.18k|  }
 378| 3.18k|}
 379|      |
 380| 4.69k|void ClassifyRefs::VisitBinaryOperator(BinaryOperator *BO) {
 381|      |  // Ignore the evaluation of a DeclRefExpr on the LHS of an assignment. If this
 382|      |  // is not a compound-assignment, we will treat it as initializing the variable
 383|      |  // when TransferFunctions visits it. A compound-assignment does not affect
 384|      |  // whether a variable is uninitialized, and there's no point counting it as a
 385|      |  // use.
 386| 4.69k|  if (BO->isCompoundAssignmentOp())
 387|   261|    classify(BO->getLHS(), Use);
 388| 4.43k|  else if (BO->getOpcode() == BO_Assign || BO->getOpcode() == BO_Comma)
 389| 2.29k|    classify(BO->getLHS(), Ignore);
 390| 4.69k|}
 391|      |
 392| 1.35k|void ClassifyRefs::VisitUnaryOperator(UnaryOperator *UO) {
 393|      |  // Increment and decrement are uses despite there being no lvalue-to-rvalue
 394|      |  // conversion.
 395| 1.35k|  if (UO->isIncrementDecrementOp())
 396|   910|    classify(UO->getSubExpr(), Use);
 397| 1.35k|}
 398|      |
 399|   997|void ClassifyRefs::VisitOMPExecutableDirective(OMPExecutableDirective *ED) {
 400|   997|  for (Stmt *S : OMPExecutableDirective::used_clauses_children(ED->clauses()))
 401|   969|    classify(cast<Expr>(S), Use);
 402|   997|}
 403|      |
 404| 4.98k|static bool isPointerToConst(const QualType &QT) {
 405| 4.98k|  return QT->isAnyPointerType() && QT->getPointeeType().isConstQualified();
 406| 4.98k|}
 407|      |
 408| 3.17k|static bool hasTrivialBody(CallExpr *CE) {
 409| 3.17k|  if (FunctionDecl *FD = CE->getDirectCallee()) {
 410| 3.17k|    if (FunctionTemplateDecl *FTD = FD->getPrimaryTemplate())
 411|    14|      return FTD->getTemplatedDecl()->hasTrivialBody();
 412| 3.15k|    return FD->hasTrivialBody();
 413| 3.15k|  }
 414|     6|  return false;
 415|     6|}
 416|      |
 417| 3.19k|void ClassifyRefs::VisitCallExpr(CallExpr *CE) {
 418|      |  // Classify arguments to std::move as used.
 419| 3.19k|  if (CE->isCallToStdMove()) {
 420|      |    // RecordTypes are handled in SemaDeclCXX.cpp.
 421|    18|    if (!CE->getArg(0)->getType()->isRecordType())
 422|     9|      classify(CE->getArg(0), Use);
 423|    18|    return;
 424|    18|  }
 425| 3.17k|  bool isTrivialBody = hasTrivialBody(CE);
 426|      |  // If a value is passed by const pointer to a function,
 427|      |  // we should not assume that it is initialized by the call, and we
 428|      |  // conservatively do not assume that it is used.
 429|      |  // If a value is passed by const reference to a function,
 430|      |  // it should already be initialized.
 431| 3.17k|  for (CallExpr::arg_iterator I = CE->arg_begin(), E = CE->arg_end();
 432| 8.43k|       I != E; ++I) {
 433| 5.25k|    if ((*I)->isGLValue()) {
 434|   276|      if ((*I)->getType().isConstQualified())
 435|    26|        classify((*I), isTrivialBody ? Ignore : ConstRefUse);
 436| 4.98k|    } else if (isPointerToConst((*I)->getType())) {
 437|   337|      const Expr *Ex = stripCasts(DC->getParentASTContext(), *I);
 438|   337|      const auto *UO = dyn_cast<UnaryOperator>(Ex);
 439|   337|      if (UO && UO->getOpcode() == UO_AddrOf)
 440|     2|        Ex = UO->getSubExpr();
 441|   337|      classify(Ex, Ignore);
 442|   337|    }
 443| 5.25k|  }
 444| 3.17k|}
 445|      |
 446| 13.8k|void ClassifyRefs::VisitCastExpr(CastExpr *CE) {
 447| 13.8k|  if (CE->getCastKind() == CK_LValueToRValue)
 448| 6.69k|    classify(CE->getSubExpr(), Use);
 449| 7.17k|  else if (const auto *CSE = dyn_cast<CStyleCastExpr>(CE)) {
 450| 1.38k|    if (CSE->getType()->isVoidType()) {
 451|      |      // Squelch any detected load of an uninitialized value if
 452|      |      // we cast it to void.
 453|      |      // e.g. (void) x;
 454|   184|      classify(CSE->getSubExpr(), Ignore);
 455|   184|    }
 456| 1.38k|  }
 457| 13.8k|}
 458|      |
 459|      |//------------------------------------------------------------------------====//
 460|      |// Transfer function for uninitialized values analysis.
 461|      |//====------------------------------------------------------------------------//
 462|      |
 463|      |namespace {
 464|      |
 465|      |class TransferFunctions : public StmtVisitor<TransferFunctions> {
 466|      |  CFGBlockValues &vals;
 467|      |  const CFG &cfg;
 468|      |  const CFGBlock *block;
 469|      |  AnalysisDeclContext &ac;
 470|      |  const ClassifyRefs &classification;
 471|      |  ObjCNoReturn objCNoRet;
 472|      |  UninitVariablesHandler &handler;
 473|      |
 474|      |public:
 475|      |  TransferFunctions(CFGBlockValues &vals, const CFG &cfg,
 476|      |                    const CFGBlock *block, AnalysisDeclContext &ac,
 477|      |                    const ClassifyRefs &classification,
 478|      |                    UninitVariablesHandler &handler)
 479|      |      : vals(vals), cfg(cfg), block(block), ac(ac),
 480|      |        classification(classification), objCNoRet(ac.getASTContext()),
 481| 10.6k|        handler(handler) {}
 482|      |
 483|      |  void reportUse(const Expr *ex, const VarDecl *vd);
 484|      |  void reportConstRefUse(const Expr *ex, const VarDecl *vd);
 485|      |
 486|      |  void VisitBinaryOperator(BinaryOperator *bo);
 487|      |  void VisitBlockExpr(BlockExpr *be);
 488|      |  void VisitCallExpr(CallExpr *ce);
 489|      |  void VisitDeclRefExpr(DeclRefExpr *dr);
 490|      |  void VisitDeclStmt(DeclStmt *ds);
 491|      |  void VisitGCCAsmStmt(GCCAsmStmt *as);
 492|      |  void VisitObjCForCollectionStmt(ObjCForCollectionStmt *FS);
 493|      |  void VisitObjCMessageExpr(ObjCMessageExpr *ME);
 494|      |  void VisitOMPExecutableDirective(OMPExecutableDirective *ED);
 495|      |
 496| 4.29k|  bool isTrackedVar(const VarDecl *vd) {
 497| 4.29k|    return ::isTrackedVar(vd, cast<DeclContext>(ac.getDecl()));
 498| 4.29k|  }
 499|      |
 500| 2.58k|  FindVarResult findVar(const Expr *ex) {
 501| 2.58k|    return ::findVar(ex, cast<DeclContext>(ac.getDecl()));
 502| 2.58k|  }
 503|      |
 504| 4.27k|  UninitUse getUninitUse(const Expr *ex, const VarDecl *vd, Value v) {
 505| 4.27k|    UninitUse Use(ex, isAlwaysUninit(v));
 506| 4.27k|
 507| 4.27k|    assert(isUninitialized(v));
 508| 4.27k|    if (Use.getKind() == UninitUse::Always)
 509| 4.01k|      return Use;
 510|   256|
 511|      |    // If an edge which leads unconditionally to this use did not initialize
 512|      |    // the variable, we can say something stronger than 'may be uninitialized':
 513|      |    // we can say 'either it's used uninitialized or you have dead code'.
 514|      |    //
 515|      |    // We track the number of successors of a node which have been visited, and
 516|      |    // visit a node once we have visited all of its successors. Only edges where
 517|      |    // the variable might still be uninitialized are followed. Since a variable
 518|      |    // can't transfer from being initialized to being uninitialized, this will
 519|      |    // trace out the subgraph which inevitably leads to the use and does not
 520|      |    // initialize the variable. We do not want to skip past loops, since their
 521|      |    // non-termination might be correlated with the initialization condition.
 522|      |    //
 523|      |    // For example:
 524|      |    //
 525|      |    //         void f(bool a, bool b) {
 526|      |    // block1:   int n;
 527|      |    //           if (a) {
 528|      |    // block2:     if (b)
 529|      |    // block3:       n = 1;
 530|      |    // block4:   } else if (b) {
 531|      |    // block5:     while (!a) {
 532|      |    // block6:       do_work(&a);
 533|      |    //               n = 2;
 534|      |    //             }
 535|      |    //           }
 536|      |    // block7:   if (a)
 537|      |    // block8:     g();
 538|      |    // block9:   return n;
 539|      |    //         }
 540|      |    //
 541|      |    // Starting from the maybe-uninitialized use in block 9:
 542|      |    //  * Block 7 is not visited because we have only visited one of its two
 543|      |    //    successors.
 544|      |    //  * Block 8 is visited because we've visited its only successor.
 545|      |    // From block 8:
 546|      |    //  * Block 7 is visited because we've now visited both of its successors.
 547|      |    // From block 7:
 548|      |    //  * Blocks 1, 2, 4, 5, and 6 are not visited because we didn't visit all
 549|      |    //    of their successors (we didn't visit 4, 3, 5, 6, and 5, respectively).
 550|      |    //  * Block 3 is not visited because it initializes 'n'.
 551|      |    // Now the algorithm terminates, having visited blocks 7 and 8, and having
 552|      |    // found the frontier is blocks 2, 4, and 5.
 553|      |    //
 554|      |    // 'n' is definitely uninitialized for two edges into block 7 (from blocks 2
 555|      |    // and 4), so we report that any time either of those edges is taken (in
 556|      |    // each case when 'b == false'), 'n' is used uninitialized.
 557|   256|    SmallVector<const CFGBlock*, 32> Queue;
 558|   256|    SmallVector<unsigned, 32> SuccsVisited(cfg.getNumBlockIDs(), 0);
 559|   256|    Queue.push_back(block);
 560|      |    // Specify that we've already visited all successors of the starting block.
 561|      |    // This has the dual purpose of ensuring we never add it to the queue, and
 562|      |    // of marking it as not being a candidate element of the frontier.
 563|   256|    SuccsVisited[block->getBlockID()] = block->succ_size();
 564| 1.65k|    while (!Queue.empty()) {
 565| 1.39k|      const CFGBlock *B = Queue.pop_back_val();
 566| 1.39k|
 567|      |      // If the use is always reached from the entry block, make a note of that.
 568| 1.39k|      if (B == &cfg.getEntry())
 569|    48|        Use.setUninitAfterCall();
 570| 1.39k|
 571| 1.39k|      for (CFGBlock::const_pred_iterator I = B->pred_begin(), E = B->pred_end();
 572| 3.33k|           I != E; ++I) {
 573| 1.93k|        const CFGBlock *Pred = *I;
 574| 1.93k|        if (!Pred)
 575|     4|          continue;
 576| 1.93k|
 577| 1.93k|        Value AtPredExit = vals.getValue(Pred, B, vd);
 578| 1.93k|        if (AtPredExit == Initialized)
 579|      |          // This block initializes the variable.
 580|   222|          continue;
 581| 1.71k|        if (AtPredExit == MayUninitialized &&
 582|   467|            vals.getValue(B, nullptr, vd) == Uninitialized) {
 583|      |          // This block declares the variable (uninitialized), and is reachable
 584|      |          // from a block that initializes the variable. We can't guarantee to
 585|      |          // give an earlier location for the diagnostic (and it appears that
 586|      |          // this code is intended to be reachable) so give a diagnostic here
 587|      |          // and go no further down this path.
 588|    19|          Use.setUninitAfterDecl();
 589|    19|          continue;
 590|    19|        }
 591| 1.69k|
 592| 1.69k|        if (AtPredExit == MayUninitialized) {
 593|      |          // If the predecessor's terminator is an "asm goto" that initializes
 594|      |          // the variable, then it won't be counted as "initialized" on the
 595|      |          // non-fallthrough paths.
 596|   448|          CFGTerminator term = Pred->getTerminator();
 597|   448|          if (const auto *as = dyn_cast_or_null<GCCAsmStmt>(term.getStmt())) {
 598|    22|            const CFGBlock *fallthrough = *Pred->succ_begin();
 599|    22|            if (as->isAsmGoto() &&
 600|    32|                llvm::any_of(as->outputs(), [&](const Expr *output) {
 601|    32|                    return vd == findVar(output).getDecl() &&
 602|    22|                        llvm::any_of(as->labels(),
 603|    32|                                     [&](const AddrLabelExpr *label) {
 604|    32|                          return label->getLabel()->getStmt() == B->Label &&
 605|    10|                              B != fallthrough;
 606|    32|                        });
 607|     8|                })) {
 608|     8|              Use.setUninitAfterDecl();
 609|     8|              continue;
 610|     8|            }
 611| 1.68k|          }
 612|   448|        }
 613| 1.68k|
 614| 1.68k|        unsigned &SV = SuccsVisited[Pred->getBlockID()];
 615| 1.68k|        if (!SV) {
 616|      |          // When visiting the first successor of a block, mark all NULL
 617|      |          // successors as having been visited.
 618| 1.33k|          for (CFGBlock::const_succ_iterator SI = Pred->succ_begin(),
 619| 1.33k|                                             SE = Pred->succ_end();
 620| 3.29k|               SI != SE; ++SI)
 621| 1.95k|            if (!*SI)
 622|    44|              ++SV;
 623| 1.33k|        }
 624| 1.68k|
 625| 1.68k|        if (++SV == Pred->succ_size())
 626|      |          // All paths from this block lead to the use and don't initialize the
 627|      |          // variable.
 628| 1.14k|          Queue.push_back(Pred);
 629| 1.68k|      }
 630| 1.39k|    }
 631|   256|
 632|      |    // Scan the frontier, looking for blocks where the variable was
 633|      |    // uninitialized.
 634| 3.75k|    for (const auto *Block : cfg) {
 635| 3.75k|      unsigned BlockID = Block->getBlockID();
 636| 3.75k|      const Stmt *Term = Block->getTerminatorStmt();
 637| 3.75k|      if (SuccsVisited[BlockID] && SuccsVisited[BlockID] < Block->succ_size() &&
 638|   197|          Term) {
 639|      |        // This block inevitably leads to the use. If we have an edge from here
 640|      |        // to a post-dominator block, and the variable is uninitialized on that
 641|      |        // edge, we have found a bug.
 642|   197|        for (CFGBlock::const_succ_iterator I = Block->succ_begin(),
 643|   623|             E = Block->succ_end(); I != E; ++I) {
 644|   426|          const CFGBlock *Succ = *I;
 645|   426|          if (Succ && SuccsVisited[Succ->getBlockID()] >= Succ->succ_size() &&
 646|   213|              vals.getValue(Block, Succ, vd) == Uninitialized) {
 647|      |            // Switch cases are a special case: report the label to the caller
 648|      |            // as the 'terminator', not the switch statement itself. Suppress
 649|      |            // situations where no label matched: we can't be sure that's
 650|      |            // possible.
 651|   136|            if (isa<SwitchStmt>(Term)) {
 652|    24|              const Stmt *Label = Succ->getLabel();
 653|    24|              if (!Label || !isa<SwitchCase>(Label))
 654|      |                // Might not be possible.
 655|    16|                continue;
 656|     8|              UninitUse::Branch Branch;
 657|     8|              Branch.Terminator = Label;
 658|     8|              Branch.Output = 0; // Ignored.
 659|     8|              Use.addUninitBranch(Branch);
 660|   112|            } else {
 661|   112|              UninitUse::Branch Branch;
 662|   112|              Branch.Terminator = Term;
 663|   112|              Branch.Output = I - Block->succ_begin();
 664|   112|              Use.addUninitBranch(Branch);
 665|   112|            }
 666|   136|          }
 667|   426|        }
 668|   197|      }
 669| 3.75k|    }
 670|   256|
 671|   256|    return Use;
 672|   256|  }
 673|      |};
 674|      |
 675|      |} // namespace
 676|      |
 677| 5.92k|void TransferFunctions::reportUse(const Expr *ex, const VarDecl *vd) {
 678| 5.92k|  Value v = vals[vd];
 679| 5.92k|  if (isUninitialized(v))
 680| 4.24k|    handler.handleUseOfUninitVariable(vd, getUninitUse(ex, vd, v));
 681| 5.92k|}
 682|      |
 683|    44|void TransferFunctions::reportConstRefUse(const Expr *ex, const VarDecl *vd) {
 684|    44|  Value v = vals[vd];
 685|    44|  if (isAlwaysUninit(v))
 686|    31|    handler.handleConstRefUseOfUninitVariable(vd, getUninitUse(ex, vd, v));
 687|    44|}
 688|      |
 689|     4|void TransferFunctions::VisitObjCForCollectionStmt(ObjCForCollectionStmt *FS) {
 690|      |  // This represents an initialization of the 'element' value.
 691|     4|  if (const auto *DS = dyn_cast<DeclStmt>(FS->getElement())) {
 692|     2|    const auto *VD = cast<VarDecl>(DS->getSingleDecl());
 693|     2|    if (isTrackedVar(VD))
 694|     2|      vals[VD] = Initialized;
 695|     2|  }
 696|     4|}
 697|      |
 698|      |void TransferFunctions::VisitOMPExecutableDirective(
 699| 2.17k|    OMPExecutableDirective *ED) {
 700| 2.13k|  for (Stmt *S : OMPExecutableDirective::used_clauses_children(ED->clauses())) {
 701| 2.13k|    assert(S && "Expected non-null used-in-clause child.");
 702| 2.13k|    Visit(S);
 703| 2.13k|  }
 704| 2.17k|  if (!ED->isStandaloneDirective())
 705| 2.12k|    Visit(ED->getStructuredBlock());
 706| 2.17k|}
 707|      |
 708|    18|void TransferFunctions::VisitBlockExpr(BlockExpr *be) {
 709|    18|  const BlockDecl *bd = be->getBlockDecl();
 710|    20|  for (const auto &I : bd->captures()) {
 711|    20|    const VarDecl *vd = I.getVariable();
 712|    20|    if (!isTrackedVar(vd))
 713|     2|      continue;
 714|    18|    if (I.isByRef()) {
 715|     8|      vals[vd] = Initialized;
 716|     8|      continue;
 717|     8|    }
 718|    10|    reportUse(be, vd);
 719|    10|  }
 720|    18|}
 721|      |
 722| 3.99k|void TransferFunctions::VisitCallExpr(CallExpr *ce) {
 723| 3.99k|  if (Decl *Callee = ce->getCalleeDecl()) {
 724| 3.99k|    if (Callee->hasAttr<ReturnsTwiceAttr>()) {
 725|      |      // After a call to a function like setjmp or vfork, any variable which is
 726|      |      // initialized anywhere within this function may now be initialized. For
 727|      |      // now, just assume such a call initializes all variables.  FIXME: Only
 728|      |      // mark variables as initialized if they have an initializer which is
 729|      |      // reachable from here.
 730|    20|      vals.setAllScratchValues(Initialized);
 731|    20|    }
 732| 3.97k|    else if (Callee->hasAttr<AnalyzerNoReturnAttr>()) {
 733|      |      // Functions labeled like "analyzer_noreturn" are often used to denote
 734|      |      // "panic" functions that in special debug situations can still return,
 735|      |      // but for the most part should not be treated as returning.  This is a
 736|      |      // useful annotation borrowed from the static analyzer that is useful for
 737|      |      // suppressing branch-specific false positives when we call one of these
 738|      |      // functions but keep pretending the path continues (when in reality the
 739|      |      // user doesn't care).
 740|     4|      vals.setAllScratchValues(Unknown);
 741|     4|    }
 742| 3.99k|  }
 743| 3.99k|}
 744|      |
 745| 21.1k|void TransferFunctions::VisitDeclRefExpr(DeclRefExpr *dr) {
 746| 21.1k|  switch (classification.get(dr)) {
 747| 14.6k|  case ClassifyRefs::Ignore:
 748| 14.6k|    break;
 749| 5.91k|  case ClassifyRefs::Use:
 750| 5.91k|    reportUse(dr, cast<VarDecl>(dr->getDecl()));
 751| 5.91k|    break;
 752|   603|  case ClassifyRefs::Init:
 753|   603|    vals[cast<VarDecl>(dr->getDecl())] = Initialized;
 754|   603|    break;
 755|    19|  case ClassifyRefs::SelfInit:
 756|    19|    handler.handleSelfInit(cast<VarDecl>(dr->getDecl()));
 757|    19|    break;
 758|    44|  case ClassifyRefs::ConstRefUse:
 759|    44|    reportConstRefUse(dr, cast<VarDecl>(dr->getDecl()));
 760|    44|    break;
 761| 21.1k|  }
 762| 21.1k|}
 763|      |
 764| 6.09k|void TransferFunctions::VisitBinaryOperator(BinaryOperator *BO) {
 765| 6.09k|  if (BO->getOpcode() == BO_Assign) {
 766| 2.53k|    FindVarResult Var = findVar(BO->getLHS());
 767| 2.53k|    if (const VarDecl *VD = Var.getDecl())
 768|   639|      vals[VD] = Initialized;
 769| 2.53k|  }
 770| 6.09k|}
 771|      |
 772| 4.27k|void TransferFunctions::VisitDeclStmt(DeclStmt *DS) {
 773| 4.27k|  for (auto *DI : DS->decls()) {
 774| 4.27k|    auto *VD = dyn_cast<VarDecl>(DI);
 775| 4.27k|    if (VD && isTrackedVar(VD)) {
 776| 2.53k|      if (getSelfInitExpr(VD)) {
 777|      |        // If the initializer consists solely of a reference to itself, we
 778|      |        // explicitly mark the variable as uninitialized. This allows code
 779|      |        // like the following:
 780|      |        //
 781|      |        //   int x = x;
 782|      |        //
 783|      |        // to deliberately leave a variable uninitialized. Different analysis
 784|      |        // clients can detect this pattern and adjust their reporting
 785|      |        // appropriately, but we need to continue to analyze subsequent uses
 786|      |        // of the variable.
 787|    19|        vals[VD] = Uninitialized;
 788| 2.51k|      } else if (VD->getInit()) {
 789|      |        // Treat the new variable as initialized.
 790|   956|        vals[VD] = Initialized;
 791| 1.56k|      } else {
 792|      |        // No initializer: the variable is now uninitialized. This matters
 793|      |        // for cases like:
 794|      |        //   while (...) {
 795|      |        //     int n;
 796|      |        //     use(n);
 797|      |        //     n = 0;
 798|      |        //   }
 799|      |        // FIXME: Mark the variable as uninitialized whenever its scope is
 800|      |        // left, since its scope could be re-entered by a jump over the
 801|      |        // declaration.
 802| 1.56k|        vals[VD] = Uninitialized;
 803| 1.56k|      }
 804| 2.53k|    }
 805| 4.27k|  }
 806| 4.27k|}
 807|      |
 808|    41|void TransferFunctions::VisitGCCAsmStmt(GCCAsmStmt *as) {
 809|      |  // An "asm goto" statement is a terminator that may initialize some variables.
 810|    41|  if (!as->isAsmGoto())
 811|    35|    return;
 812|     6|
 813|     6|  for (const Expr *o : as->outputs())
 814|     9|    if (const VarDecl *VD = findVar(o).getDecl())
 815|     6|      if (vals[VD] != Initialized)
 816|      |        // If the variable isn't initialized by the time we get here, then we
 817|      |        // mark it as potentially uninitialized for those cases where it's used
 818|      |        // on an indirect path, where it's not guaranteed to be defined.
 819|     6|        vals[VD] = MayUninitialized;
 820|     6|}
 821|      |
 822|     3|void TransferFunctions::VisitObjCMessageExpr(ObjCMessageExpr *ME) {
 823|      |  // If the Objective-C message expression is an implicit no-return that
 824|      |  // is not modeled in the CFG, set the tracked dataflow values to Unknown.
 825|     3|  if (objCNoRet.isImplicitNoReturn(ME)) {
 826|     3|    vals.setAllScratchValues(Unknown);
 827|     3|  }
 828|     3|}
 829|      |
 830|      |//------------------------------------------------------------------------====//
 831|      |// High-level "driver" logic for uninitialized values analysis.
 832|      |//====------------------------------------------------------------------------//
 833|      |
 834|      |static bool runOnBlock(const CFGBlock *block, const CFG &cfg,
 835|      |                       AnalysisDeclContext &ac, CFGBlockValues &vals,
 836|      |                       const ClassifyRefs &classification,
 837|      |                       llvm::BitVector &wasAnalyzed,
 838| 10.6k|                       UninitVariablesHandler &handler) {
 839| 10.6k|  wasAnalyzed[block->getBlockID()] = true;
 840| 10.6k|  vals.resetScratch();
 841|      |  // Merge in values of predecessor blocks.
 842| 10.6k|  bool isFirst = true;
 843| 10.6k|  for (CFGBlock::const_pred_iterator I = block->pred_begin(),
 844| 24.8k|       E = block->pred_end(); I != E; ++I) {
 845| 14.1k|    const CFGBlock *pred = *I;
 846| 14.1k|    if (!pred)
 847|    39|      continue;
 848| 14.1k|    if (wasAnalyzed[pred->getBlockID()]) {
 849| 12.9k|      vals.mergeIntoScratch(vals.getValueVector(pred), isFirst);
 850| 12.9k|      isFirst = false;
 851| 12.9k|    }
 852| 14.1k|  }
 853|      |  // Apply the transfer function.
 854| 10.6k|  TransferFunctions tf(vals, cfg, block, ac, classification, handler);
 855| 55.7k|  for (const auto &I : *block) {
 856| 55.7k|    if (Optional<CFGStmt> cs = I.getAs<CFGStmt>())
 857| 55.6k|      tf.Visit(const_cast<Stmt *>(cs->getStmt()));
 858| 55.7k|  }
 859| 10.6k|  CFGTerminator terminator = block->getTerminator();
 860| 10.6k|  if (auto *as = dyn_cast_or_null<GCCAsmStmt>(terminator.getStmt()))
 861|     6|    if (as->isAsmGoto())
 862|     6|      tf.Visit(as);
 863| 10.6k|  return vals.updateValueVectorWithScratch(block);
 864| 10.6k|}
 865|      |
 866|      |namespace {
 867|      |
 868|      |/// PruneBlocksHandler is a special UninitVariablesHandler that is used
 869|      |/// to detect when a CFGBlock has any *potential* use of an uninitialized
 870|      |/// variable.  It is mainly used to prune out work during the final
 871|      |/// reporting pass.
 872|      |struct PruneBlocksHandler : public UninitVariablesHandler {
 873|      |  /// Records if a CFGBlock had a potential use of an uninitialized variable.
 874|      |  llvm::BitVector hadUse;
 875|      |
 876|      |  /// Records if any CFGBlock had a potential use of an uninitialized variable.
 877|      |  bool hadAnyUse = false;
 878|      |
 879|      |  /// The current block to scribble use information.
 880|      |  unsigned currentBlock = 0;
 881|      |
 882| 1.31k|  PruneBlocksHandler(unsigned numBlocks) : hadUse(numBlocks, false) {}
 883|      |
 884| 1.31k|  ~PruneBlocksHandler() override = default;
 885|      |
 886|      |  void handleUseOfUninitVariable(const VarDecl *vd,
 887| 2.14k|                                 const UninitUse &use) override {
 888| 2.14k|    hadUse[currentBlock] = true;
 889| 2.14k|    hadAnyUse = true;
 890| 2.14k|  }
 891|      |
 892|      |  void handleConstRefUseOfUninitVariable(const VarDecl *vd,
 893|    16|                                         const UninitUse &use) override {
 894|    16|    hadUse[currentBlock] = true;
 895|    16|    hadAnyUse = true;
 896|    16|  }
 897|      |
 898|      |  /// Called when the uninitialized variable analysis detects the
 899|      |  /// idiom 'int x = x'.  All other uses of 'x' within the initializer
 900|      |  /// are handled by handleUseOfUninitVariable.
 901|    10|  void handleSelfInit(const VarDecl *vd) override {
 902|    10|    hadUse[currentBlock] = true;
 903|    10|    hadAnyUse = true;
 904|    10|  }
 905|      |};
 906|      |
 907|      |} // namespace
 908|      |
 909|      |void clang::runUninitializedVariablesAnalysis(
 910|      |    const DeclContext &dc,
 911|      |    const CFG &cfg,
 912|      |    AnalysisDeclContext &ac,
 913|      |    UninitVariablesHandler &handler,
 914| 40.9k|    UninitVariablesAnalysisStats &stats) {
 915| 40.9k|  CFGBlockValues vals(cfg);
 916| 40.9k|  vals.computeSetOfDeclarations(dc);
 917| 40.9k|  if (vals.hasNoDeclarations())
 918| 39.6k|    return;
 919| 1.31k|
 920| 1.31k|  stats.NumVariablesAnalyzed = vals.getNumEntries();
 921| 1.31k|
 922|      |  // Precompute which expressions are uses and which are initializations.
 923| 1.31k|  ClassifyRefs classification(ac);
 924| 1.31k|  cfg.VisitBlockStmts(classification);
 925| 1.31k|
 926|      |  // Mark all variables uninitialized at the entry.
 927| 1.31k|  const CFGBlock &entry = cfg.getEntry();
 928| 1.31k|  ValueVector &vec = vals.getValueVector(&entry);
 929| 1.31k|  const unsigned n = vals.getNumEntries();
 930| 3.37k|  for (unsigned j = 0; j < n; ++j) {
 931| 2.06k|    vec[j] = Uninitialized;
 932| 2.06k|  }
 933| 1.31k|
 934|      |  // Proceed with the workist.
 935| 1.31k|  ForwardDataflowWorklist worklist(cfg, ac);
 936| 1.31k|  llvm::BitVector previouslyVisited(cfg.getNumBlockIDs());
 937| 1.31k|  worklist.enqueueSuccessors(&cfg.getEntry());
 938| 1.31k|  llvm::BitVector wasAnalyzed(cfg.getNumBlockIDs(), false);
 939| 1.31k|  wasAnalyzed[cfg.getEntry().getBlockID()] = true;
 940| 1.31k|  PruneBlocksHandler PBH(cfg.getNumBlockIDs());
 941| 1.31k|
 942| 10.8k|  while (const CFGBlock *block = worklist.dequeue()) {
 943| 9.57k|    PBH.currentBlock = block->getBlockID();
 944| 9.57k|
 945|      |    // Did the block change?
 946| 9.57k|    bool changed = runOnBlock(block, cfg, ac, vals,
 947| 9.57k|                              classification, wasAnalyzed, PBH);
 948| 9.57k|    ++stats.NumBlockVisits;
 949| 9.57k|    if (changed || !previouslyVisited[block->getBlockID()])
 950| 8.43k|      worklist.enqueueSuccessors(block);
 951| 9.57k|    previouslyVisited[block->getBlockID()] = true;
 952| 9.57k|  }
 953| 1.31k|
 954| 1.31k|  if (!PBH.hadAnyUse)
 955|   316|    return;
 956|   997|
 957|      |  // Run through the blocks one more time, and report uninitialized variables.
 958|   997|  for (const auto *block : cfg)
 959| 5.81k|    if (PBH.hadUse[block->getBlockID()]) {
 960| 1.09k|      runOnBlock(block, cfg, ac, vals, classification, wasAnalyzed, handler);
 961| 1.09k|      ++stats.NumBlockVisits;
 962| 1.09k|    }
 963|   997|}
 964|      |
 965| 42.2k|UninitVariablesHandler::~UninitVariablesHandler() = default;
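
The listing above defines the analysis but not its driver: a client supplies an UninitVariablesHandler and calls runUninitializedVariablesAnalysis over a function's CFG. The sketch below is illustrative and not part of the covered file; the handler callbacks, the entry point, and the stats fields are taken from the declarations used in the listing, while the names PrintingHandler and diagnoseUninitializedUses and the message text are hypothetical. In-tree, Sema's AnalysisBasedWarnings plays this role and turns the callbacks into real -Wuninitialized diagnostics.

// Minimal sketch of a client of the analysis above (illustrative names only).
#include "clang/AST/Decl.h"
#include "clang/Analysis/Analyses/UninitializedValues.h"
#include "clang/Analysis/AnalysisDeclContext.h"
#include "clang/Analysis/CFG.h"
#include "llvm/Support/raw_ostream.h"

namespace {
// Receives the callbacks that runUninitializedVariablesAnalysis makes for each
// potentially uninitialized use it finds; here they are simply printed.
class PrintingHandler : public clang::UninitVariablesHandler {
public:
  void handleUseOfUninitVariable(const clang::VarDecl *vd,
                                 const clang::UninitUse &use) override {
    // UninitUse::Always means every path to this use leaves 'vd' uninitialized.
    bool Always = use.getKind() == clang::UninitUse::Always;
    llvm::errs() << "variable '" << vd->getName() << "' is "
                 << (Always ? "" : "possibly ") << "used uninitialized\n";
  }
  void handleConstRefUseOfUninitVariable(const clang::VarDecl *vd,
                                         const clang::UninitUse &use) override {
    llvm::errs() << "uninitialized '" << vd->getName()
                 << "' passed as a const reference argument\n";
  }
  void handleSelfInit(const clang::VarDecl *vd) override {
    llvm::errs() << "'" << vd->getName() << "' is initialized from itself\n";
  }
};
} // namespace

// Runs the analysis over one function body whose CFG has already been built.
static void diagnoseUninitializedUses(const clang::DeclContext &dc,
                                      const clang::CFG &cfg,
                                      clang::AnalysisDeclContext &ac) {
  PrintingHandler handler;
  clang::UninitVariablesAnalysisStats stats;
  stats.NumVariablesAnalyzed = 0;
  stats.NumBlockVisits = 0;
  clang::runUninitializedVariablesAnalysis(dc, cfg, ac, handler, stats);
  llvm::errs() << stats.NumVariablesAnalyzed << " variables analyzed, "
               << stats.NumBlockVisits << " block visits\n";
}

Decoupling detection from reporting through the handler is the same design the file itself relies on: PruneBlocksHandler implements the identical interface just to mark which blocks need a second, reporting pass with the caller's handler.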