Coverage Report

Created: 2022-01-25 06:29

/Users/buildslave/jenkins/workspace/coverage/llvm-project/clang/lib/Sema/AnalysisBasedWarnings.cpp
Line
Count
Source (jump to first uncovered line)
1
//=- AnalysisBasedWarnings.cpp - Sema warnings based on libAnalysis -*- C++ -*-=//
2
//
3
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
4
// See https://llvm.org/LICENSE.txt for license information.
5
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
6
//
7
//===----------------------------------------------------------------------===//
8
//
9
// This file defines analysis_warnings::[Policy,Executor].
10
// Together they are used by Sema to issue warnings based on inexpensive
11
// static analysis algorithms in libAnalysis.
12
//
13
//===----------------------------------------------------------------------===//
14
15
#include "clang/Sema/AnalysisBasedWarnings.h"
16
#include "clang/AST/DeclCXX.h"
17
#include "clang/AST/DeclObjC.h"
18
#include "clang/AST/EvaluatedExprVisitor.h"
19
#include "clang/AST/ExprCXX.h"
20
#include "clang/AST/ExprObjC.h"
21
#include "clang/AST/ParentMap.h"
22
#include "clang/AST/RecursiveASTVisitor.h"
23
#include "clang/AST/StmtCXX.h"
24
#include "clang/AST/StmtObjC.h"
25
#include "clang/AST/StmtVisitor.h"
26
#include "clang/Analysis/Analyses/CFGReachabilityAnalysis.h"
27
#include "clang/Analysis/Analyses/CalledOnceCheck.h"
28
#include "clang/Analysis/Analyses/Consumed.h"
29
#include "clang/Analysis/Analyses/ReachableCode.h"
30
#include "clang/Analysis/Analyses/ThreadSafety.h"
31
#include "clang/Analysis/Analyses/UninitializedValues.h"
32
#include "clang/Analysis/AnalysisDeclContext.h"
33
#include "clang/Analysis/CFG.h"
34
#include "clang/Analysis/CFGStmtMap.h"
35
#include "clang/Basic/SourceLocation.h"
36
#include "clang/Basic/SourceManager.h"
37
#include "clang/Lex/Preprocessor.h"
38
#include "clang/Sema/ScopeInfo.h"
39
#include "clang/Sema/SemaInternal.h"
40
#include "llvm/ADT/ArrayRef.h"
41
#include "llvm/ADT/BitVector.h"
42
#include "llvm/ADT/MapVector.h"
43
#include "llvm/ADT/SmallString.h"
44
#include "llvm/ADT/SmallVector.h"
45
#include "llvm/ADT/StringRef.h"
46
#include "llvm/Support/Casting.h"
47
#include <algorithm>
48
#include <deque>
49
#include <iterator>
50
51
using namespace clang;
52
53
//===----------------------------------------------------------------------===//
54
// Unreachable code analysis.
55
//===----------------------------------------------------------------------===//
56
57
namespace {
58
  class UnreachableCodeHandler : public reachable_code::Callback {
59
    Sema &S;
60
    SourceRange PreviousSilenceableCondVal;
61
62
  public:
63
162
    UnreachableCodeHandler(Sema &s) : S(s) {}
64
65
    void HandleUnreachable(reachable_code::UnreachableKind UK,
66
                           SourceLocation L,
67
                           SourceRange SilenceableCondVal,
68
                           SourceRange R1,
69
177
                           SourceRange R2) override {
70
      // Avoid reporting multiple unreachable code diagnostics that are
71
      // triggered by the same conditional value.
72
177
      if (PreviousSilenceableCondVal.isValid() &&
73
177
          
SilenceableCondVal.isValid()25
&&
74
177
          
PreviousSilenceableCondVal == SilenceableCondVal25
)
75
2
        return;
76
175
      PreviousSilenceableCondVal = SilenceableCondVal;
77
78
175
      unsigned diag = diag::warn_unreachable;
79
175
      switch (UK) {
80
16
        case reachable_code::UK_Break:
81
16
          diag = diag::warn_unreachable_break;
82
16
          break;
83
34
        case reachable_code::UK_Return:
84
34
          diag = diag::warn_unreachable_return;
85
34
          break;
86
3
        case reachable_code::UK_Loop_Increment:
87
3
          diag = diag::warn_unreachable_loop_increment;
88
3
          break;
89
122
        case reachable_code::UK_Other:
90
122
          break;
91
175
      }
92
93
175
      S.Diag(L, diag) << R1 << R2;
94
95
175
      SourceLocation Open = SilenceableCondVal.getBegin();
96
175
      if (Open.isValid()) {
97
43
        SourceLocation Close = SilenceableCondVal.getEnd();
98
43
        Close = S.getLocForEndOfToken(Close);
99
43
        if (Close.isValid()) {
100
43
          S.Diag(Open, diag::note_unreachable_silence)
101
43
            << FixItHint::CreateInsertion(Open, "/* DISABLES CODE */ (")
102
43
            << FixItHint::CreateInsertion(Close, ")");
103
43
        }
104
43
      }
105
175
    }
106
  };
107
} // anonymous namespace
108
109
/// CheckUnreachable - Check for unreachable code.
110
164
static void CheckUnreachable(Sema &S, AnalysisDeclContext &AC) {
111
  // As a heuristic prune all diagnostics not in the main file.  Currently
112
  // the majority of warnings in headers are false positives.  These
113
  // are largely caused by configuration state, e.g. preprocessor
114
  // defined code, etc.
115
  //
116
  // Note that this is also a performance optimization.  Analyzing
117
  // headers many times can be expensive.
118
164
  if (!S.getSourceManager().isInMainFile(AC.getDecl()->getBeginLoc()))
119
2
    return;
120
121
162
  UnreachableCodeHandler UC(S);
122
162
  reachable_code::FindUnreachableCode(AC, S.getPreprocessor(), UC);
123
162
}
124
125
namespace {
126
/// Warn on logical operator errors in CFGBuilder
127
class LogicalErrorHandler : public CFGCallback {
128
  Sema &S;
129
130
public:
131
40.1k
  LogicalErrorHandler(Sema &S) : S(S) {}
132
133
1.22k
  static bool HasMacroID(const Expr *E) {
134
1.22k
    if (E->getExprLoc().isMacroID())
135
6
      return true;
136
137
    // Recurse to children.
138
1.21k
    for (const Stmt *SubStmt : E->children())
139
1.07k
      if (const Expr *SubExpr = dyn_cast_or_null<Expr>(SubStmt))
140
1.07k
        if (HasMacroID(SubExpr))
141
16
          return true;
142
143
1.19k
    return false;
144
1.21k
  }
145
146
104
  void compareAlwaysTrue(const BinaryOperator *B, bool isAlwaysTrue) override {
147
104
    if (HasMacroID(B))
148
2
      return;
149
150
102
    SourceRange DiagRange = B->getSourceRange();
151
102
    S.Diag(B->getExprLoc(), diag::warn_tautological_overlap_comparison)
152
102
        << DiagRange << isAlwaysTrue;
153
102
  }
154
155
  void compareBitwiseEquality(const BinaryOperator *B,
156
28
                              bool isAlwaysTrue) override {
157
28
    if (HasMacroID(B))
158
4
      return;
159
160
24
    SourceRange DiagRange = B->getSourceRange();
161
24
    S.Diag(B->getExprLoc(), diag::warn_comparison_bitwise_always)
162
24
        << DiagRange << isAlwaysTrue;
163
24
  }
164
165
16
  void compareBitwiseOr(const BinaryOperator *B) override {
166
16
    if (HasMacroID(B))
167
0
      return;
168
169
16
    SourceRange DiagRange = B->getSourceRange();
170
16
    S.Diag(B->getExprLoc(), diag::warn_comparison_bitwise_or) << DiagRange;
171
16
  }
172
173
  static bool hasActiveDiagnostics(DiagnosticsEngine &Diags,
174
520k
                                   SourceLocation Loc) {
175
520k
    return !Diags.isIgnored(diag::warn_tautological_overlap_comparison, Loc) ||
176
520k
           
!Diags.isIgnored(diag::warn_comparison_bitwise_or, Loc)439k
;
177
520k
  }
178
};
179
} // anonymous namespace
180
181
//===----------------------------------------------------------------------===//
182
// Check for infinite self-recursion in functions
183
//===----------------------------------------------------------------------===//
184
185
// Returns true if the function is called anywhere within the CFGBlock.
186
// For member functions, the additional condition of being call from the
187
// this pointer is required.
188
41.7k
static bool hasRecursiveCallInPath(const FunctionDecl *FD, CFGBlock &Block) {
189
  // Process all the Stmt's in this block to find any calls to FD.
190
426k
  for (const auto &B : Block) {
191
426k
    if (B.getKind() != CFGElement::Statement)
192
744
      continue;
193
194
425k
    const CallExpr *CE = dyn_cast<CallExpr>(B.getAs<CFGStmt>()->getStmt());
195
425k
    if (!CE || 
!CE->getCalleeDecl()41.9k
||
196
425k
        
CE->getCalleeDecl()->getCanonicalDecl() != FD41.9k
)
197
425k
      continue;
198
199
    // Skip function calls which are qualified with a templated class.
200
17
    if (const DeclRefExpr *DRE =
201
17
            dyn_cast<DeclRefExpr>(CE->getCallee()->IgnoreParenImpCasts())) {
202
14
      if (NestedNameSpecifier *NNS = DRE->getQualifier()) {
203
1
        if (NNS->getKind() == NestedNameSpecifier::TypeSpec &&
204
1
            isa<TemplateSpecializationType>(NNS->getAsType())) {
205
1
          continue;
206
1
        }
207
1
      }
208
14
    }
209
210
16
    const CXXMemberCallExpr *MCE = dyn_cast<CXXMemberCallExpr>(CE);
211
16
    if (!MCE || 
isa<CXXThisExpr>(MCE->getImplicitObjectArgument())3
||
212
16
        
!MCE->getMethodDecl()->isVirtual()1
)
213
16
      return true;
214
16
  }
215
41.7k
  return false;
216
41.7k
}
217
218
// Returns true if every path from the entry block passes through a call to FD.
219
40.1k
static bool checkForRecursiveFunctionCall(const FunctionDecl *FD, CFG *cfg) {
220
40.1k
  llvm::SmallPtrSet<CFGBlock *, 16> Visited;
221
40.1k
  llvm::SmallVector<CFGBlock *, 16> WorkList;
222
  // Keep track of whether we found at least one recursive path.
223
40.1k
  bool foundRecursion = false;
224
225
40.1k
  const unsigned ExitID = cfg->getExit().getBlockID();
226
227
  // Seed the work list with the entry block.
228
40.1k
  WorkList.push_back(&cfg->getEntry());
229
230
81.1k
  while (!WorkList.empty()) {
231
81.1k
    CFGBlock *Block = WorkList.pop_back_val();
232
233
123k
    for (auto I = Block->succ_begin(), E = Block->succ_end(); I != E; 
++I41.8k
) {
234
81.9k
      if (CFGBlock *SuccBlock = *I) {
235
81.8k
        if (!Visited.insert(SuccBlock).second)
236
55
          continue;
237
238
        // Found a path to the exit node without a recursive call.
239
81.8k
        if (ExitID == SuccBlock->getBlockID())
240
40.0k
          return false;
241
242
        // If the successor block contains a recursive call, end analysis there.
243
41.7k
        if (hasRecursiveCallInPath(FD, *SuccBlock)) {
244
16
          foundRecursion = true;
245
16
          continue;
246
16
        }
247
248
41.7k
        WorkList.push_back(SuccBlock);
249
41.7k
      }
250
81.9k
    }
251
81.1k
  }
252
15
  return foundRecursion;
253
40.1k
}
254
255
static void checkRecursiveFunction(Sema &S, const FunctionDecl *FD,
256
40.1k
                                   const Stmt *Body, AnalysisDeclContext &AC) {
257
40.1k
  FD = FD->getCanonicalDecl();
258
259
  // Only run on non-templated functions and non-templated members of
260
  // templated classes.
261
40.1k
  if (FD->getTemplatedKind() != FunctionDecl::TK_NonTemplate &&
262
40.1k
      
FD->getTemplatedKind() != FunctionDecl::TK_MemberSpecialization50
)
263
26
    return;
264
265
40.1k
  CFG *cfg = AC.getCFG();
266
40.1k
  if (!cfg) 
return0
;
267
268
  // If the exit block is unreachable, skip processing the function.
269
40.1k
  if (cfg->getExit().pred_empty())
270
7
    return;
271
272
  // Emit diagnostic if a recursive function call is detected for all paths.
273
40.1k
  if (checkForRecursiveFunctionCall(FD, cfg))
274
13
    S.Diag(Body->getBeginLoc(), diag::warn_infinite_recursive_function);
275
40.1k
}
276
277
//===----------------------------------------------------------------------===//
278
// Check for throw in a non-throwing function.
279
//===----------------------------------------------------------------------===//
280
281
/// Determine whether an exception thrown by E, unwinding from ThrowBlock,
282
/// can reach ExitBlock.
283
static bool throwEscapes(Sema &S, const CXXThrowExpr *E, CFGBlock &ThrowBlock,
284
81
                         CFG *Body) {
285
81
  SmallVector<CFGBlock *, 16> Stack;
286
81
  llvm::BitVector Queued(Body->getNumBlockIDs());
287
288
81
  Stack.push_back(&ThrowBlock);
289
81
  Queued[ThrowBlock.getBlockID()] = true;
290
291
173
  while (!Stack.empty()) {
292
138
    CFGBlock &UnwindBlock = *Stack.back();
293
138
    Stack.pop_back();
294
295
160
    for (auto &Succ : UnwindBlock.succs()) {
296
160
      if (!Succ.isReachable() || Queued[Succ->getBlockID()])
297
0
        continue;
298
299
160
      if (Succ->getBlockID() == Body->getExit().getBlockID())
300
46
        return true;
301
302
114
      if (auto *Catch =
303
114
              dyn_cast_or_null<CXXCatchStmt>(Succ->getLabel())) {
304
57
        QualType Caught = Catch->getCaughtType();
305
57
        if (Caught.isNull() || // catch (...) catches everything
306
57
            
!E->getSubExpr()53
|| // throw; is considered cuaght by any handler
307
57
            
S.handlerCanCatch(Caught, E->getSubExpr()->getType())50
)
308
          // Exception doesn't escape via this path.
309
35
          break;
310
57
      } else {
311
57
        Stack.push_back(Succ);
312
57
        Queued[Succ->getBlockID()] = true;
313
57
      }
314
114
    }
315
138
  }
316
317
35
  return false;
318
81
}
319
320
static void visitReachableThrows(
321
    CFG *BodyCFG,
322
7.83k
    llvm::function_ref<void(const CXXThrowExpr *, CFGBlock &)> Visit) {
323
7.83k
  llvm::BitVector Reachable(BodyCFG->getNumBlockIDs());
324
7.83k
  clang::reachable_code::ScanReachableFromBlock(&BodyCFG->getEntry(), Reachable);
325
25.3k
  for (CFGBlock *B : *BodyCFG) {
326
25.3k
    if (!Reachable[B->getBlockID()])
327
36
      continue;
328
52.3k
    
for (CFGElement &E : *B)25.2k
{
329
52.3k
      Optional<CFGStmt> S = E.getAs<CFGStmt>();
330
52.3k
      if (!S)
331
556
        continue;
332
51.7k
      if (auto *Throw = dyn_cast<CXXThrowExpr>(S->getStmt()))
333
81
        Visit(Throw, *B);
334
51.7k
    }
335
25.2k
  }
336
7.83k
}
337
338
static void EmitDiagForCXXThrowInNonThrowingFunc(Sema &S, SourceLocation OpLoc,
339
46
                                                 const FunctionDecl *FD) {
340
46
  if (!S.getSourceManager().isInSystemHeader(OpLoc) &&
341
46
      FD->getTypeSourceInfo()) {
342
46
    S.Diag(OpLoc, diag::warn_throw_in_noexcept_func) << FD;
343
46
    if (S.getLangOpts().CPlusPlus11 &&
344
46
        (isa<CXXDestructorDecl>(FD) ||
345
46
         
FD->getDeclName().getCXXOverloadedOperator() == OO_Delete35
||
346
46
         
FD->getDeclName().getCXXOverloadedOperator() == OO_Array_Delete34
)) {
347
12
      if (const auto *Ty = FD->getTypeSourceInfo()->getType()->
348
12
                                         getAs<FunctionProtoType>())
349
12
        S.Diag(FD->getLocation(), diag::note_throw_in_dtor)
350
12
            << !isa<CXXDestructorDecl>(FD) << !Ty->hasExceptionSpec()
351
12
            << FD->getExceptionSpecSourceRange();
352
12
    } else
353
34
      S.Diag(FD->getLocation(), diag::note_throw_in_function)
354
34
          << FD->getExceptionSpecSourceRange();
355
46
  }
356
46
}
357
358
static void checkThrowInNonThrowingFunc(Sema &S, const FunctionDecl *FD,
359
7.83k
                                        AnalysisDeclContext &AC) {
360
7.83k
  CFG *BodyCFG = AC.getCFG();
361
7.83k
  if (!BodyCFG)
362
2
    return;
363
7.83k
  if (BodyCFG->getExit().pred_empty())
364
0
    return;
365
7.83k
  visitReachableThrows(BodyCFG, [&](const CXXThrowExpr *Throw, CFGBlock &Block) {
366
81
    if (throwEscapes(S, Throw, Block, BodyCFG))
367
46
      EmitDiagForCXXThrowInNonThrowingFunc(S, Throw->getThrowLoc(), FD);
368
81
  });
369
7.83k
}
370
371
149k
static bool isNoexcept(const FunctionDecl *FD) {
372
149k
  const auto *FPT = FD->getType()->castAs<FunctionProtoType>();
373
149k
  if (FPT->isNothrow() || 
FD->hasAttr<NoThrowAttr>()142k
)
374
7.83k
    return true;
375
142k
  return false;
376
149k
}
377
378
//===----------------------------------------------------------------------===//
379
// Check for missing return value.
380
//===----------------------------------------------------------------------===//
381
382
enum ControlFlowKind {
383
  UnknownFallThrough,
384
  NeverFallThrough,
385
  MaybeFallThrough,
386
  AlwaysFallThrough,
387
  NeverFallThroughOrReturn
388
};
389
390
/// CheckFallThrough - Check that we don't fall off the end of a
391
/// Statement that should return a value.
392
///
393
/// \returns AlwaysFallThrough iff we always fall off the end of the statement,
394
/// MaybeFallThrough iff we might or might not fall off the end,
395
/// NeverFallThroughOrReturn iff we never fall off the end of the statement or
396
/// return.  We assume NeverFallThrough iff we never fall off the end of the
397
/// statement but we may return.  We assume that functions not marked noreturn
398
/// will return.
399
131k
static ControlFlowKind CheckFallThrough(AnalysisDeclContext &AC) {
400
131k
  CFG *cfg = AC.getCFG();
401
131k
  if (!cfg) 
return UnknownFallThrough30
;
402
403
  // The CFG leaves in dead things, and we don't want the dead code paths to
404
  // confuse us, so we mark all live things first.
405
131k
  llvm::BitVector live(cfg->getNumBlockIDs());
406
131k
  unsigned count = reachable_code::ScanReachableFromBlock(&cfg->getEntry(),
407
131k
                                                          live);
408
409
131k
  bool AddEHEdges = AC.getAddEHEdges();
410
131k
  if (!AddEHEdges && count != cfg->getNumBlockIDs())
411
    // When there are things remaining dead, and we didn't add EH edges
412
    // from CallExprs to the catch clauses, we have to go back and
413
    // mark them as live.
414
6.57k
    
for (const auto *B : *cfg)877
{
415
6.57k
      if (!live[B->getBlockID()]) {
416
1.54k
        if (B->pred_begin() == B->pred_end()) {
417
885
          const Stmt *Term = B->getTerminatorStmt();
418
885
          if (Term && 
isa<CXXTryStmt>(Term)121
)
419
            // When not adding EH edges from calls, catch clauses
420
            // can otherwise seem dead.  Avoid noting them as dead.
421
38
            count += reachable_code::ScanReachableFromBlock(B, live);
422
885
          continue;
423
885
        }
424
1.54k
      }
425
6.57k
    }
426
427
  // Now we know what is live, we check the live precessors of the exit block
428
  // and look for fall through paths, being careful to ignore normal returns,
429
  // and exceptional paths.
430
131k
  bool HasLiveReturn = false;
431
131k
  bool HasFakeEdge = false;
432
131k
  bool HasPlainEdge = false;
433
131k
  bool HasAbnormalEdge = false;
434
435
  // Ignore default cases that aren't likely to be reachable because all
436
  // enums in a switch(X) have explicit case statements.
437
131k
  CFGBlock::FilterOptions FO;
438
131k
  FO.IgnoreDefaultsWithCoveredEnums = 1;
439
440
131k
  for (CFGBlock::filtered_pred_iterator I =
441
131k
           cfg->getExit().filtered_pred_start_end(FO);
442
266k
       I.hasMore(); 
++I134k
) {
443
134k
    const CFGBlock &B = **I;
444
134k
    if (!live[B.getBlockID()])
445
686
      continue;
446
447
    // Skip blocks which contain an element marked as no-return. They don't
448
    // represent actually viable edges into the exit block, so mark them as
449
    // abnormal.
450
134k
    if (B.hasNoReturnElement()) {
451
314
      HasAbnormalEdge = true;
452
314
      continue;
453
314
    }
454
455
    // Destructors can appear after the 'return' in the CFG.  This is
456
    // normal.  We need to look pass the destructors for the return
457
    // statement (if it exists).
458
133k
    CFGBlock::const_reverse_iterator ri = B.rbegin(), re = B.rend();
459
460
135k
    for ( ; ri != re ; 
++ri1.64k
)
461
134k
      if (ri->getAs<CFGStmt>())
462
133k
        break;
463
464
    // No more CFGElements in the block?
465
133k
    if (ri == re) {
466
686
      const Stmt *Term = B.getTerminatorStmt();
467
686
      if (Term && 
(27
isa<CXXTryStmt>(Term)27
||
isa<ObjCAtTryStmt>(Term)8
)) {
468
23
        HasAbnormalEdge = true;
469
23
        continue;
470
23
      }
471
      // A labeled empty statement, or the entry block...
472
663
      HasPlainEdge = true;
473
663
      continue;
474
686
    }
475
476
133k
    CFGStmt CS = ri->castAs<CFGStmt>();
477
133k
    const Stmt *S = CS.getStmt();
478
133k
    if (isa<ReturnStmt>(S) || 
isa<CoreturnStmt>(S)360
) {
479
132k
      HasLiveReturn = true;
480
132k
      continue;
481
132k
    }
482
329
    if (isa<ObjCAtThrowStmt>(S)) {
483
5
      HasFakeEdge = true;
484
5
      continue;
485
5
    }
486
324
    if (isa<CXXThrowExpr>(S)) {
487
52
      HasFakeEdge = true;
488
52
      continue;
489
52
    }
490
272
    if (isa<MSAsmStmt>(S)) {
491
      // TODO: Verify this is correct.
492
16
      HasFakeEdge = true;
493
16
      HasLiveReturn = true;
494
16
      continue;
495
16
    }
496
256
    if (isa<CXXTryStmt>(S)) {
497
0
      HasAbnormalEdge = true;
498
0
      continue;
499
0
    }
500
256
    if (!llvm::is_contained(B.succs(), &cfg->getExit())) {
501
0
      HasAbnormalEdge = true;
502
0
      continue;
503
0
    }
504
505
256
    HasPlainEdge = true;
506
256
  }
507
131k
  if (!HasPlainEdge) {
508
130k
    if (HasLiveReturn)
509
130k
      return NeverFallThrough;
510
298
    return NeverFallThroughOrReturn;
511
130k
  }
512
909
  if (HasAbnormalEdge || 
HasFakeEdge891
||
HasLiveReturn891
)
513
42
    return MaybeFallThrough;
514
  // This says AlwaysFallThrough for calls to functions that are not marked
515
  // noreturn, that don't return.  If people would like this warning to be more
516
  // accurate, such functions should be marked as noreturn.
517
867
  return AlwaysFallThrough;
518
909
}
519
520
namespace {
521
522
struct CheckFallThroughDiagnostics {
523
  unsigned diag_MaybeFallThrough_HasNoReturn;
524
  unsigned diag_MaybeFallThrough_ReturnsNonVoid;
525
  unsigned diag_AlwaysFallThrough_HasNoReturn;
526
  unsigned diag_AlwaysFallThrough_ReturnsNonVoid;
527
  unsigned diag_NeverFallThroughOrReturn;
528
  enum { Function, Block, Lambda, Coroutine } funMode;
529
  SourceLocation FuncLoc;
530
531
247k
  static CheckFallThroughDiagnostics MakeForFunction(const Decl *Func) {
532
247k
    CheckFallThroughDiagnostics D;
533
247k
    D.FuncLoc = Func->getLocation();
534
247k
    D.diag_MaybeFallThrough_HasNoReturn =
535
247k
      diag::warn_falloff_noreturn_function;
536
247k
    D.diag_MaybeFallThrough_ReturnsNonVoid =
537
247k
      diag::warn_maybe_falloff_nonvoid_function;
538
247k
    D.diag_AlwaysFallThrough_HasNoReturn =
539
247k
      diag::warn_falloff_noreturn_function;
540
247k
    D.diag_AlwaysFallThrough_ReturnsNonVoid =
541
247k
      diag::warn_falloff_nonvoid_function;
542
543
    // Don't suggest that virtual functions be marked "noreturn", since they
544
    // might be overridden by non-noreturn functions.
545
247k
    bool isVirtualMethod = false;
546
247k
    if (const CXXMethodDecl *Method = dyn_cast<CXXMethodDecl>(Func))
547
47.5k
      isVirtualMethod = Method->isVirtual();
548
549
    // Don't suggest that template instantiations be marked "noreturn"
550
247k
    bool isTemplateInstantiation = false;
551
247k
    if (const FunctionDecl *Function = dyn_cast<FunctionDecl>(Func))
552
241k
      isTemplateInstantiation = Function->isTemplateInstantiation();
553
554
247k
    if (!isVirtualMethod && 
!isTemplateInstantiation244k
)
555
228k
      D.diag_NeverFallThroughOrReturn =
556
228k
        diag::warn_suggest_noreturn_function;
557
18.4k
    else
558
18.4k
      D.diag_NeverFallThroughOrReturn = 0;
559
560
247k
    D.funMode = Function;
561
247k
    return D;
562
247k
  }
563
564
176
  static CheckFallThroughDiagnostics MakeForCoroutine(const Decl *Func) {
565
176
    CheckFallThroughDiagnostics D;
566
176
    D.FuncLoc = Func->getLocation();
567
176
    D.diag_MaybeFallThrough_HasNoReturn = 0;
568
176
    D.diag_MaybeFallThrough_ReturnsNonVoid =
569
176
        diag::warn_maybe_falloff_nonvoid_coroutine;
570
176
    D.diag_AlwaysFallThrough_HasNoReturn = 0;
571
176
    D.diag_AlwaysFallThrough_ReturnsNonVoid =
572
176
        diag::warn_falloff_nonvoid_coroutine;
573
176
    D.funMode = Coroutine;
574
176
    return D;
575
176
  }
576
577
2.12k
  static CheckFallThroughDiagnostics MakeForBlock() {
578
2.12k
    CheckFallThroughDiagnostics D;
579
2.12k
    D.diag_MaybeFallThrough_HasNoReturn =
580
2.12k
      diag::err_noreturn_block_has_return_expr;
581
2.12k
    D.diag_MaybeFallThrough_ReturnsNonVoid =
582
2.12k
      diag::err_maybe_falloff_nonvoid_block;
583
2.12k
    D.diag_AlwaysFallThrough_HasNoReturn =
584
2.12k
      diag::err_noreturn_block_has_return_expr;
585
2.12k
    D.diag_AlwaysFallThrough_ReturnsNonVoid =
586
2.12k
      diag::err_falloff_nonvoid_block;
587
2.12k
    D.diag_NeverFallThroughOrReturn = 0;
588
2.12k
    D.funMode = Block;
589
2.12k
    return D;
590
2.12k
  }
591
592
3.66k
  static CheckFallThroughDiagnostics MakeForLambda() {
593
3.66k
    CheckFallThroughDiagnostics D;
594
3.66k
    D.diag_MaybeFallThrough_HasNoReturn =
595
3.66k
      diag::err_noreturn_lambda_has_return_expr;
596
3.66k
    D.diag_MaybeFallThrough_ReturnsNonVoid =
597
3.66k
      diag::warn_maybe_falloff_nonvoid_lambda;
598
3.66k
    D.diag_AlwaysFallThrough_HasNoReturn =
599
3.66k
      diag::err_noreturn_lambda_has_return_expr;
600
3.66k
    D.diag_AlwaysFallThrough_ReturnsNonVoid =
601
3.66k
      diag::warn_falloff_nonvoid_lambda;
602
3.66k
    D.diag_NeverFallThroughOrReturn = 0;
603
3.66k
    D.funMode = Lambda;
604
3.66k
    return D;
605
3.66k
  }
606
607
  bool checkDiagnostics(DiagnosticsEngine &D, bool ReturnsVoid,
608
253k
                        bool HasNoReturn) const {
609
253k
    if (funMode == Function) {
610
247k
      return (ReturnsVoid ||
611
247k
              D.isIgnored(diag::warn_maybe_falloff_nonvoid_function,
612
130k
                          FuncLoc)) &&
613
247k
             
(117k
!HasNoReturn117k
||
614
117k
              D.isIgnored(diag::warn_noreturn_function_has_return_expr,
615
171
                          FuncLoc)) &&
616
247k
             
(117k
!ReturnsVoid117k
||
617
117k
              
D.isIgnored(diag::warn_suggest_noreturn_block, FuncLoc)117k
);
618
247k
    }
619
5.96k
    if (funMode == Coroutine) {
620
176
      return (ReturnsVoid ||
621
176
              
D.isIgnored(diag::warn_maybe_falloff_nonvoid_function, FuncLoc)38
||
622
176
              D.isIgnored(diag::warn_maybe_falloff_nonvoid_coroutine,
623
38
                          FuncLoc)) &&
624
176
             
(!HasNoReturn)138
;
625
176
    }
626
    // For blocks / lambdas.
627
5.79k
    return ReturnsVoid && 
!HasNoReturn4.47k
;
628
5.96k
  }
629
};
630
631
} // anonymous namespace
632
633
/// CheckFallThroughForBody - Check that we don't fall off the end of a
634
/// function that should return a value.  Check that we don't fall off the end
635
/// of a noreturn function.  We assume that functions and blocks not marked
636
/// noreturn will return.
637
static void CheckFallThroughForBody(Sema &S, const Decl *D, const Stmt *Body,
638
                                    QualType BlockType,
639
                                    const CheckFallThroughDiagnostics &CD,
640
                                    AnalysisDeclContext &AC,
641
253k
                                    sema::FunctionScopeInfo *FSI) {
642
643
253k
  bool ReturnsVoid = false;
644
253k
  bool HasNoReturn = false;
645
253k
  bool IsCoroutine = FSI->isCoroutine();
646
647
253k
  if (const auto *FD = dyn_cast<FunctionDecl>(D)) {
648
245k
    if (const auto *CBody = dyn_cast<CoroutineBodyStmt>(Body))
649
178
      ReturnsVoid = CBody->getFallthroughHandler() != nullptr;
650
245k
    else
651
245k
      ReturnsVoid = FD->getReturnType()->isVoidType();
652
245k
    HasNoReturn = FD->isNoReturn();
653
245k
  }
654
7.46k
  else if (const auto *MD = dyn_cast<ObjCMethodDecl>(D)) {
655
5.34k
    ReturnsVoid = MD->getReturnType()->isVoidType();
656
5.34k
    HasNoReturn = MD->hasAttr<NoReturnAttr>();
657
5.34k
  }
658
2.12k
  else if (isa<BlockDecl>(D)) {
659
2.12k
    if (const FunctionType *FT =
660
2.12k
          BlockType->getPointeeType()->getAs<FunctionType>()) {
661
2.12k
      if (FT->getReturnType()->isVoidType())
662
1.71k
        ReturnsVoid = true;
663
2.12k
      if (FT->getNoReturnAttr())
664
1
        HasNoReturn = true;
665
2.12k
    }
666
2.12k
  }
667
668
253k
  DiagnosticsEngine &Diags = S.getDiagnostics();
669
670
  // Short circuit for compilation speed.
671
253k
  if (CD.checkDiagnostics(Diags, ReturnsVoid, HasNoReturn))
672
121k
      return;
673
131k
  SourceLocation LBrace = Body->getBeginLoc(), RBrace = Body->getEndLoc();
674
131k
  auto EmitDiag = [&](SourceLocation Loc, unsigned DiagID) {
675
861
    if (IsCoroutine)
676
14
      S.Diag(Loc, DiagID) << FSI->CoroutinePromise->getType();
677
847
    else
678
847
      S.Diag(Loc, DiagID);
679
861
  };
680
681
  // cpu_dispatch functions permit empty function bodies for ICC compatibility.
682
131k
  if (D->getAsFunction() && 
D->getAsFunction()->isCPUDispatchMultiVersion()128k
)
683
10
    return;
684
685
  // Either in a function body compound statement, or a function-try-block.
686
131k
  switch (CheckFallThrough(AC)) {
687
30
    case UnknownFallThrough:
688
30
      break;
689
690
42
    case MaybeFallThrough:
691
42
      if (HasNoReturn)
692
0
        EmitDiag(RBrace, CD.diag_MaybeFallThrough_HasNoReturn);
693
42
      else if (!ReturnsVoid)
694
41
        EmitDiag(RBrace, CD.diag_MaybeFallThrough_ReturnsNonVoid);
695
42
      break;
696
867
    case AlwaysFallThrough:
697
867
      if (HasNoReturn)
698
101
        EmitDiag(RBrace, CD.diag_AlwaysFallThrough_HasNoReturn);
699
766
      else if (!ReturnsVoid)
700
719
        EmitDiag(RBrace, CD.diag_AlwaysFallThrough_ReturnsNonVoid);
701
867
      break;
702
298
    case NeverFallThroughOrReturn:
703
298
      if (ReturnsVoid && 
!HasNoReturn78
&&
CD.diag_NeverFallThroughOrReturn9
) {
704
4
        if (const FunctionDecl *FD = dyn_cast<FunctionDecl>(D)) {
705
3
          S.Diag(LBrace, CD.diag_NeverFallThroughOrReturn) << 0 << FD;
706
3
        } else 
if (const ObjCMethodDecl *1
MD1
= dyn_cast<ObjCMethodDecl>(D)) {
707
1
          S.Diag(LBrace, CD.diag_NeverFallThroughOrReturn) << 1 << MD;
708
1
        } else {
709
0
          S.Diag(LBrace, CD.diag_NeverFallThroughOrReturn);
710
0
        }
711
4
      }
712
298
      break;
713
130k
    case NeverFallThrough:
714
130k
      break;
715
131k
  }
716
131k
}
717
718
//===----------------------------------------------------------------------===//
719
// -Wuninitialized
720
//===----------------------------------------------------------------------===//
721
722
namespace {
723
/// ContainsReference - A visitor class to search for references to
724
/// a particular declaration (the needle) within any evaluated component of an
725
/// expression (recursively).
726
class ContainsReference : public ConstEvaluatedExprVisitor<ContainsReference> {
727
  bool FoundReference;
728
  const DeclRefExpr *Needle;
729
730
public:
731
  typedef ConstEvaluatedExprVisitor<ContainsReference> Inherited;
732
733
  ContainsReference(ASTContext &Context, const DeclRefExpr *Needle)
734
70
    : Inherited(Context), FoundReference(false), Needle(Needle) {}
735
736
301
  void VisitExpr(const Expr *E) {
737
    // Stop evaluating if we already have a reference.
738
301
    if (FoundReference)
739
25
      return;
740
741
276
    Inherited::VisitExpr(E);
742
276
  }
743
744
128
  void VisitDeclRefExpr(const DeclRefExpr *E) {
745
128
    if (E == Needle)
746
67
      FoundReference = true;
747
61
    else
748
61
      Inherited::VisitDeclRefExpr(E);
749
128
  }
750
751
70
  bool doesContainReference() const { return FoundReference; }
752
};
753
} // anonymous namespace
754
755
1.05k
static bool SuggestInitializationFixit(Sema &S, const VarDecl *VD) {
756
1.05k
  QualType VariableTy = VD->getType().getCanonicalType();
757
1.05k
  if (VariableTy->isBlockPointerType() &&
758
1.05k
      
!VD->hasAttr<BlocksAttr>()3
) {
759
3
    S.Diag(VD->getLocation(), diag::note_block_var_fixit_add_initialization)
760
3
        << VD->getDeclName()
761
3
        << FixItHint::CreateInsertion(VD->getLocation(), "__block ");
762
3
    return true;
763
3
  }
764
765
  // Don't issue a fixit if there is already an initializer.
766
1.04k
  if (VD->getInit())
767
3
    return false;
768
769
  // Don't suggest a fixit inside macros.
770
1.04k
  if (VD->getEndLoc().isMacroID())
771
2
    return false;
772
773
1.04k
  SourceLocation Loc = S.getLocForEndOfToken(VD->getEndLoc());
774
775
  // Suggest possible initialization (if any).
776
1.04k
  std::string Init = S.getFixItZeroInitializerForType(VariableTy, Loc);
777
1.04k
  if (Init.empty())
778
5
    return false;
779
780
1.03k
  S.Diag(Loc, diag::note_var_fixit_add_initialization) << VD->getDeclName()
781
1.03k
    << FixItHint::CreateInsertion(Loc, Init);
782
1.03k
  return true;
783
1.04k
}
784
785
/// Create a fixit to remove an if-like statement, on the assumption that its
786
/// condition is CondVal.
787
static void CreateIfFixit(Sema &S, const Stmt *If, const Stmt *Then,
788
                          const Stmt *Else, bool CondVal,
789
23
                          FixItHint &Fixit1, FixItHint &Fixit2) {
790
23
  if (CondVal) {
791
    // If condition is always true, remove all but the 'then'.
792
12
    Fixit1 = FixItHint::CreateRemoval(
793
12
        CharSourceRange::getCharRange(If->getBeginLoc(), Then->getBeginLoc()));
794
12
    if (Else) {
795
2
      SourceLocation ElseKwLoc = S.getLocForEndOfToken(Then->getEndLoc());
796
2
      Fixit2 =
797
2
          FixItHint::CreateRemoval(SourceRange(ElseKwLoc, Else->getEndLoc()));
798
2
    }
799
12
  } else {
800
    // If condition is always false, remove all but the 'else'.
801
11
    if (Else)
802
11
      Fixit1 = FixItHint::CreateRemoval(CharSourceRange::getCharRange(
803
11
          If->getBeginLoc(), Else->getBeginLoc()));
804
0
    else
805
0
      Fixit1 = FixItHint::CreateRemoval(If->getSourceRange());
806
11
  }
807
23
}
808
809
/// DiagUninitUse -- Helper function to produce a diagnostic for an
810
/// uninitialized use of a variable.
811
static void DiagUninitUse(Sema &S, const VarDecl *VD, const UninitUse &Use,
812
1.04k
                          bool IsCapturedByBlock) {
813
1.04k
  bool Diagnosed = false;
814
815
1.04k
  switch (Use.getKind()) {
816
949
  case UninitUse::Always:
817
949
    S.Diag(Use.getUser()->getBeginLoc(), diag::warn_uninit_var)
818
949
        << VD->getDeclName() << IsCapturedByBlock
819
949
        << Use.getUser()->getSourceRange();
820
949
    return;
821
822
7
  case UninitUse::AfterDecl:
823
13
  case UninitUse::AfterCall:
824
13
    S.Diag(VD->getLocation(), diag::warn_sometimes_uninit_var)
825
13
      << VD->getDeclName() << IsCapturedByBlock
826
13
      << (Use.getKind() == UninitUse::AfterDecl ? 
47
:
56
)
827
13
      << const_cast<DeclContext*>(VD->getLexicalDeclContext())
828
13
      << VD->getSourceRange();
829
13
    S.Diag(Use.getUser()->getBeginLoc(), diag::note_uninit_var_use)
830
13
        << IsCapturedByBlock << Use.getUser()->getSourceRange();
831
13
    return;
832
833
32
  case UninitUse::Maybe:
834
85
  case UninitUse::Sometimes:
835
    // Carry on to report sometimes-uninitialized branches, if possible,
836
    // or a 'may be used uninitialized' diagnostic otherwise.
837
85
    break;
838
1.04k
  }
839
840
  // Diagnose each branch which leads to a sometimes-uninitialized use.
841
85
  for (UninitUse::branch_iterator I = Use.branch_begin(), E = Use.branch_end();
842
140
       I != E; 
++I55
) {
843
55
    assert(Use.getKind() == UninitUse::Sometimes);
844
845
0
    const Expr *User = Use.getUser();
846
55
    const Stmt *Term = I->Terminator;
847
848
    // Information used when building the diagnostic.
849
55
    unsigned DiagKind;
850
55
    StringRef Str;
851
55
    SourceRange Range;
852
853
    // FixIts to suppress the diagnostic by removing the dead condition.
854
    // For all binary terminators, branch 0 is taken if the condition is true,
855
    // and branch 1 is taken if the condition is false.
856
55
    int RemoveDiagKind = -1;
857
55
    const char *FixitStr =
858
55
        S.getLangOpts().CPlusPlus ? 
(47
I->Output47
?
"true"20
:
"false"27
)
859
55
                                  : 
(8
I->Output8
?
"1"4
:
"0"4
);
860
55
    FixItHint Fixit1, Fixit2;
861
862
55
    switch (Term ? Term->getStmtClass() : 
Stmt::DeclStmtClass0
) {
863
0
    default:
864
      // Don't know how to report this. Just fall back to 'may be used
865
      // uninitialized'. FIXME: Can this happen?
866
0
      continue;
867
868
    // "condition is true / condition is false".
869
19
    case Stmt::IfStmtClass: {
870
19
      const IfStmt *IS = cast<IfStmt>(Term);
871
19
      DiagKind = 0;
872
19
      Str = "if";
873
19
      Range = IS->getCond()->getSourceRange();
874
19
      RemoveDiagKind = 0;
875
19
      CreateIfFixit(S, IS, IS->getThen(), IS->getElse(),
876
19
                    I->Output, Fixit1, Fixit2);
877
19
      break;
878
0
    }
879
4
    case Stmt::ConditionalOperatorClass: {
880
4
      const ConditionalOperator *CO = cast<ConditionalOperator>(Term);
881
4
      DiagKind = 0;
882
4
      Str = "?:";
883
4
      Range = CO->getCond()->getSourceRange();
884
4
      RemoveDiagKind = 0;
885
4
      CreateIfFixit(S, CO, CO->getTrueExpr(), CO->getFalseExpr(),
886
4
                    I->Output, Fixit1, Fixit2);
887
4
      break;
888
0
    }
889
12
    case Stmt::BinaryOperatorClass: {
890
12
      const BinaryOperator *BO = cast<BinaryOperator>(Term);
891
12
      if (!BO->isLogicalOp())
892
0
        continue;
893
12
      DiagKind = 0;
894
12
      Str = BO->getOpcodeStr();
895
12
      Range = BO->getLHS()->getSourceRange();
896
12
      RemoveDiagKind = 0;
897
12
      if ((BO->getOpcode() == BO_LAnd && 
I->Output4
) ||
898
12
          
(10
BO->getOpcode() == BO_LOr10
&&
!I->Output8
))
899
        // true && y -> y, false || y -> y.
900
8
        Fixit1 = FixItHint::CreateRemoval(
901
8
            SourceRange(BO->getBeginLoc(), BO->getOperatorLoc()));
902
4
      else
903
        // false && y -> false, true || y -> true.
904
4
        Fixit1 = FixItHint::CreateReplacement(BO->getSourceRange(), FixitStr);
905
12
      break;
906
12
    }
907
908
    // "loop is entered / loop is exited".
909
4
    case Stmt::WhileStmtClass:
910
4
      DiagKind = 1;
911
4
      Str = "while";
912
4
      Range = cast<WhileStmt>(Term)->getCond()->getSourceRange();
913
4
      RemoveDiagKind = 1;
914
4
      Fixit1 = FixItHint::CreateReplacement(Range, FixitStr);
915
4
      break;
916
4
    case Stmt::ForStmtClass:
917
4
      DiagKind = 1;
918
4
      Str = "for";
919
4
      Range = cast<ForStmt>(Term)->getCond()->getSourceRange();
920
4
      RemoveDiagKind = 1;
921
4
      if (I->Output)
922
2
        Fixit1 = FixItHint::CreateRemoval(Range);
923
2
      else
924
2
        Fixit1 = FixItHint::CreateReplacement(Range, FixitStr);
925
4
      break;
926
4
    case Stmt::CXXForRangeStmtClass:
927
4
      if (I->Output == 1) {
928
        // The use occurs if a range-based for loop's body never executes.
929
        // That may be impossible, and there's no syntactic fix for this,
930
        // so treat it as a 'may be uninitialized' case.
931
2
        continue;
932
2
      }
933
2
      DiagKind = 1;
934
2
      Str = "for";
935
2
      Range = cast<CXXForRangeStmt>(Term)->getRangeInit()->getSourceRange();
936
2
      break;
937
938
    // "condition is true / loop is exited".
939
4
    case Stmt::DoStmtClass:
940
4
      DiagKind = 2;
941
4
      Str = "do";
942
4
      Range = cast<DoStmt>(Term)->getCond()->getSourceRange();
943
4
      RemoveDiagKind = 1;
944
4
      Fixit1 = FixItHint::CreateReplacement(Range, FixitStr);
945
4
      break;
946
947
    // "switch case is taken".
948
2
    case Stmt::CaseStmtClass:
949
2
      DiagKind = 3;
950
2
      Str = "case";
951
2
      Range = cast<CaseStmt>(Term)->getLHS()->getSourceRange();
952
2
      break;
953
2
    case Stmt::DefaultStmtClass:
954
2
      DiagKind = 3;
955
2
      Str = "default";
956
2
      Range = cast<DefaultStmt>(Term)->getDefaultLoc();
957
2
      break;
958
55
    }
959
960
53
    S.Diag(Range.getBegin(), diag::warn_sometimes_uninit_var)
961
53
      << VD->getDeclName() << IsCapturedByBlock << DiagKind
962
53
      << Str << I->Output << Range;
963
53
    S.Diag(User->getBeginLoc(), diag::note_uninit_var_use)
964
53
        << IsCapturedByBlock << User->getSourceRange();
965
53
    if (RemoveDiagKind != -1)
966
47
      S.Diag(Fixit1.RemoveRange.getBegin(), diag::note_uninit_fixit_remove_cond)
967
47
        << RemoveDiagKind << Str << I->Output << Fixit1 << Fixit2;
968
969
53
    Diagnosed = true;
970
53
  }
971
972
85
  if (!Diagnosed)
973
34
    S.Diag(Use.getUser()->getBeginLoc(), diag::warn_maybe_uninit_var)
974
34
        << VD->getDeclName() << IsCapturedByBlock
975
34
        << Use.getUser()->getSourceRange();
976
85
}
977
978
/// Diagnose uninitialized const reference usages.
979
static bool DiagnoseUninitializedConstRefUse(Sema &S, const VarDecl *VD,
980
14
                                             const UninitUse &Use) {
981
14
  S.Diag(Use.getUser()->getBeginLoc(), diag::warn_uninit_const_reference)
982
14
      << VD->getDeclName() << Use.getUser()->getSourceRange();
983
14
  return true;
984
14
}
985
986
/// DiagnoseUninitializedUse -- Helper function for diagnosing uses of an
987
/// uninitialized variable. This manages the different forms of diagnostic
988
/// emitted for particular types of uses. Returns true if the use was diagnosed
989
/// as a warning. If a particular use is one we omit warnings for, returns
990
/// false.
991
static bool DiagnoseUninitializedUse(Sema &S, const VarDecl *VD,
992
                                     const UninitUse &Use,
993
1.11k
                                     bool alwaysReportSelfInit = false) {
994
1.11k
  if (const DeclRefExpr *DRE = dyn_cast<DeclRefExpr>(Use.getUser())) {
995
    // Inspect the initializer of the variable declaration which is
996
    // being referenced prior to its initialization. We emit
997
    // specialized diagnostics for self-initialization, and we
998
    // specifically avoid warning about self references which take the
999
    // form of:
1000
    //
1001
    //   int x = x;
1002
    //
1003
    // This is used to indicate to GCC that 'x' is intentionally left
1004
    // uninitialized. Proven code paths which access 'x' in
1005
    // an uninitialized state after this will still warn.
1006
1.11k
    if (const Expr *Initializer = VD->getInit()) {
1007
70
      if (!alwaysReportSelfInit && 
DRE == Initializer->IgnoreParenImpCasts()67
)
1008
0
        return false;
1009
1010
70
      ContainsReference CR(S.Context, DRE);
1011
70
      CR.Visit(Initializer);
1012
70
      if (CR.doesContainReference()) {
1013
67
        S.Diag(DRE->getBeginLoc(), diag::warn_uninit_self_reference_in_init)
1014
67
            << VD->getDeclName() << VD->getLocation() << DRE->getSourceRange();
1015
67
        return true;
1016
67
      }
1017
70
    }
1018
1019
1.04k
    DiagUninitUse(S, VD, Use, false);
1020
1.04k
  } else {
1021
5
    const BlockExpr *BE = cast<BlockExpr>(Use.getUser());
1022
5
    if (VD->getType()->isBlockPointerType() && 
!VD->hasAttr<BlocksAttr>()3
)
1023
3
      S.Diag(BE->getBeginLoc(),
1024
3
             diag::warn_uninit_byref_blockvar_captured_by_block)
1025
3
          << VD->getDeclName()
1026
3
          << VD->getType().getQualifiers().hasObjCLifetime();
1027
2
    else
1028
2
      DiagUninitUse(S, VD, Use, true);
1029
5
  }
1030
1031
  // Report where the variable was declared when the use wasn't within
1032
  // the initializer of that declaration & we didn't already suggest
1033
  // an initialization fixit.
1034
1.05k
  if (!SuggestInitializationFixit(S, VD))
1035
10
    S.Diag(VD->getBeginLoc(), diag::note_var_declared_here)
1036
10
        << VD->getDeclName();
1037
1038
1.05k
  return true;
1039
1.11k
}
1040
1041
namespace {
1042
  class FallthroughMapper : public RecursiveASTVisitor<FallthroughMapper> {
1043
  public:
1044
    FallthroughMapper(Sema &S)
1045
      : FoundSwitchStatements(false),
1046
122
        S(S) {
1047
122
    }
1048
1049
122
    bool foundSwitchStatements() const { return FoundSwitchStatements; }
1050
1051
53
    void markFallthroughVisited(const AttributedStmt *Stmt) {
1052
53
      bool Found = FallthroughStmts.erase(Stmt);
1053
53
      assert(Found);
1054
0
      (void)Found;
1055
53
    }
1056
1057
    typedef llvm::SmallPtrSet<const AttributedStmt*, 8> AttrStmts;
1058
1059
96
    const AttrStmts &getFallthroughStmts() const {
1060
96
      return FallthroughStmts;
1061
96
    }
1062
1063
78
    void fillReachableBlocks(CFG *Cfg) {
1064
78
      assert(ReachableBlocks.empty() && "ReachableBlocks already filled");
1065
0
      std::deque<const CFGBlock *> BlockQueue;
1066
1067
78
      ReachableBlocks.insert(&Cfg->getEntry());
1068
78
      BlockQueue.push_back(&Cfg->getEntry());
1069
      // Mark all case blocks reachable to avoid problems with switching on
1070
      // constants, covered enums, etc.
1071
      // These blocks can contain fall-through annotations, and we don't want to
1072
      // issue a warn_fallthrough_attr_unreachable for them.
1073
671
      for (const auto *B : *Cfg) {
1074
671
        const Stmt *L = B->getLabel();
1075
671
        if (L && 
isa<SwitchCase>(L)275
&&
ReachableBlocks.insert(B).second272
)
1076
272
          BlockQueue.push_back(B);
1077
671
      }
1078
1079
714
      while (!BlockQueue.empty()) {
1080
636
        const CFGBlock *P = BlockQueue.front();
1081
636
        BlockQueue.pop_front();
1082
849
        for (const CFGBlock *B : P->succs()) {
1083
849
          if (B && 
ReachableBlocks.insert(B).second826
)
1084
286
            BlockQueue.push_back(B);
1085
849
        }
1086
636
      }
1087
78
    }
1088
1089
    bool checkFallThroughIntoBlock(const CFGBlock &B, int &AnnotatedCnt,
1090
272
                                   bool IsTemplateInstantiation) {
1091
272
      assert(!ReachableBlocks.empty() && "ReachableBlocks empty");
1092
1093
0
      int UnannotatedCnt = 0;
1094
272
      AnnotatedCnt = 0;
1095
1096
272
      std::deque<const CFGBlock*> BlockQueue(B.pred_begin(), B.pred_end());
1097
713
      while (!BlockQueue.empty()) {
1098
441
        const CFGBlock *P = BlockQueue.front();
1099
441
        BlockQueue.pop_front();
1100
441
        if (!P) 
continue7
;
1101
1102
434
        const Stmt *Term = P->getTerminatorStmt();
1103
434
        if (Term && 
isa<SwitchStmt>(Term)292
)
1104
270
          continue; // Switch statement, good.
1105
1106
164
        const SwitchCase *SW = dyn_cast_or_null<SwitchCase>(P->getLabel());
1107
164
        if (SW && 
SW->getSubStmt() == B.getLabel()123
&&
P->begin() == P->end()4
)
1108
4
          continue; // Previous case label has no statements, good.
1109
1110
160
        const LabelStmt *L = dyn_cast_or_null<LabelStmt>(P->getLabel());
1111
160
        if (L && 
L->getSubStmt() == B.getLabel()3
&&
P->begin() == P->end()3
)
1112
3
          continue; // Case label is preceded with a normal label, good.
1113
1114
157
        if (!ReachableBlocks.count(P)) {
1115
36
          for (const CFGElement &Elem : llvm::reverse(*P)) {
1116
36
            if (Optional<CFGStmt> CS = Elem.getAs<CFGStmt>()) {
1117
34
              if (const AttributedStmt *AS = asFallThroughAttr(CS->getStmt())) {
1118
                // Don't issue a warning for an unreachable fallthrough
1119
                // attribute in template instantiations as it may not be
1120
                // unreachable in all instantiations of the template.
1121
10
                if (!IsTemplateInstantiation)
1122
9
                  S.Diag(AS->getBeginLoc(),
1123
9
                         diag::warn_unreachable_fallthrough_attr);
1124
10
                markFallthroughVisited(AS);
1125
10
                ++AnnotatedCnt;
1126
10
                break;
1127
10
              }
1128
              // Don't care about other unreachable statements.
1129
34
            }
1130
36
          }
1131
          // If there are no unreachable statements, this may be a special
1132
          // case in CFG:
1133
          // case X: {
1134
          //    A a;  // A has a destructor.
1135
          //    break;
1136
          // }
1137
          // // <<<< This place is represented by a 'hanging' CFG block.
1138
          // case Y:
1139
22
          continue;
1140
22
        }
1141
1142
135
        const Stmt *LastStmt = getLastStmt(*P);
1143
135
        if (const AttributedStmt *AS = asFallThroughAttr(LastStmt)) {
1144
43
          markFallthroughVisited(AS);
1145
43
          ++AnnotatedCnt;
1146
43
          continue; // Fallthrough annotation, good.
1147
43
        }
1148
1149
92
        if (!LastStmt) { // This block contains no executable statements.
1150
          // Traverse its predecessors.
1151
2
          std::copy(P->pred_begin(), P->pred_end(),
1152
2
                    std::back_inserter(BlockQueue));
1153
2
          continue;
1154
2
        }
1155
1156
90
        ++UnannotatedCnt;
1157
90
      }
1158
272
      return !!UnannotatedCnt;
1159
272
    }
1160
1161
    // RecursiveASTVisitor setup.
1162
13
    bool shouldWalkTypesOfTypeLocs() const { return false; }
1163
1164
70
    bool VisitAttributedStmt(AttributedStmt *S) {
1165
70
      if (asFallThroughAttr(S))
1166
69
        FallthroughStmts.insert(S);
1167
70
      return true;
1168
70
    }
1169
1170
103
    bool VisitSwitchStmt(SwitchStmt *S) {
1171
103
      FoundSwitchStatements = true;
1172
103
      return true;
1173
103
    }
1174
1175
    // We don't want to traverse local type declarations. We analyze their
1176
    // methods separately.
1177
28
    bool TraverseDecl(Decl *D) { return true; }
1178
1179
    // We analyze lambda bodies separately. Skip them here.
1180
1
    bool TraverseLambdaExpr(LambdaExpr *LE) {
1181
      // Traverse the captures, but not the body.
1182
1
      for (const auto C : zip(LE->captures(), LE->capture_inits()))
1183
0
        TraverseLambdaCapture(LE, &std::get<0>(C), std::get<1>(C));
1184
1
      return true;
1185
1
    }
1186
1187
  private:
1188
1189
239
    static const AttributedStmt *asFallThroughAttr(const Stmt *S) {
1190
239
      if (const AttributedStmt *AS = dyn_cast_or_null<AttributedStmt>(S)) {
1191
123
        if (hasSpecificAttr<FallThroughAttr>(AS->getAttrs()))
1192
122
          return AS;
1193
123
      }
1194
117
      return nullptr;
1195
239
    }
1196
1197
135
    static const Stmt *getLastStmt(const CFGBlock &B) {
1198
135
      if (const Stmt *Term = B.getTerminatorStmt())
1199
18
        return Term;
1200
117
      for (const CFGElement &Elem : llvm::reverse(B))
1201
111
        if (Optional<CFGStmt> CS = Elem.getAs<CFGStmt>())
1202
111
          return CS->getStmt();
1203
      // Workaround to detect a statement thrown out by CFGBuilder:
1204
      //   case X: {} case Y:
1205
      //   case X: ; case Y:
1206
6
      if (const SwitchCase *SW = dyn_cast_or_null<SwitchCase>(B.getLabel()))
1207
4
        if (!isa<SwitchCase>(SW->getSubStmt()))
1208
4
          return SW->getSubStmt();
1209
1210
2
      return nullptr;
1211
6
    }
1212
1213
    bool FoundSwitchStatements;
1214
    AttrStmts FallthroughStmts;
1215
    Sema &S;
1216
    llvm::SmallPtrSet<const CFGBlock *, 16> ReachableBlocks;
1217
  };
1218
} // anonymous namespace
1219
1220
static StringRef getFallthroughAttrSpelling(Preprocessor &PP,
1221
65
                                            SourceLocation Loc) {
1222
65
  TokenValue FallthroughTokens[] = {
1223
65
    tok::l_square, tok::l_square,
1224
65
    PP.getIdentifierInfo("fallthrough"),
1225
65
    tok::r_square, tok::r_square
1226
65
  };
1227
1228
65
  TokenValue ClangFallthroughTokens[] = {
1229
65
    tok::l_square, tok::l_square, PP.getIdentifierInfo("clang"),
1230
65
    tok::coloncolon, PP.getIdentifierInfo("fallthrough"),
1231
65
    tok::r_square, tok::r_square
1232
65
  };
1233
1234
65
  bool PreferClangAttr = !PP.getLangOpts().CPlusPlus17 && 
!PP.getLangOpts().C2x39
;
1235
1236
65
  StringRef MacroName;
1237
65
  if (PreferClangAttr)
1238
38
    MacroName = PP.getLastMacroWithSpelling(Loc, ClangFallthroughTokens);
1239
65
  if (MacroName.empty())
1240
58
    MacroName = PP.getLastMacroWithSpelling(Loc, FallthroughTokens);
1241
65
  if (MacroName.empty() && 
!PreferClangAttr50
)
1242
22
    MacroName = PP.getLastMacroWithSpelling(Loc, ClangFallthroughTokens);
1243
65
  if (MacroName.empty()) {
1244
40
    if (!PreferClangAttr)
1245
12
      MacroName = "[[fallthrough]]";
1246
28
    else if (PP.getLangOpts().CPlusPlus)
1247
21
      MacroName = "[[clang::fallthrough]]";
1248
7
    else
1249
7
      MacroName = "__attribute__((fallthrough))";
1250
40
  }
1251
65
  return MacroName;
1252
65
}
1253
1254
static void DiagnoseSwitchLabelsFallthrough(Sema &S, AnalysisDeclContext &AC,
1255
122
                                            bool PerFunction) {
1256
122
  FallthroughMapper FM(S);
1257
122
  FM.TraverseStmt(AC.getBody());
1258
1259
122
  if (!FM.foundSwitchStatements())
1260
43
    return;
1261
1262
79
  if (PerFunction && 
FM.getFallthroughStmts().empty()18
)
1263
1
    return;
1264
1265
78
  CFG *Cfg = AC.getCFG();
1266
1267
78
  if (!Cfg)
1268
0
    return;
1269
1270
78
  FM.fillReachableBlocks(Cfg);
1271
1272
671
  for (const CFGBlock *B : llvm::reverse(*Cfg)) {
1273
671
    const Stmt *Label = B->getLabel();
1274
1275
671
    if (!Label || 
!isa<SwitchCase>(Label)275
)
1276
399
      continue;
1277
1278
272
    int AnnotatedCnt;
1279
1280
272
    bool IsTemplateInstantiation = false;
1281
272
    if (const FunctionDecl *Function = dyn_cast<FunctionDecl>(AC.getDecl()))
1282
269
      IsTemplateInstantiation = Function->isTemplateInstantiation();
1283
272
    if (!FM.checkFallThroughIntoBlock(*B, AnnotatedCnt,
1284
272
                                      IsTemplateInstantiation))
1285
190
      continue;
1286
1287
82
    S.Diag(Label->getBeginLoc(),
1288
82
           PerFunction ? 
diag::warn_unannotated_fallthrough_per_function10
1289
82
                       : 
diag::warn_unannotated_fallthrough72
);
1290
1291
82
    if (!AnnotatedCnt) {
1292
81
      SourceLocation L = Label->getBeginLoc();
1293
81
      if (L.isMacroID())
1294
10
        continue;
1295
1296
71
      const Stmt *Term = B->getTerminatorStmt();
1297
      // Skip empty cases.
1298
115
      while (B->empty() && 
!Term50
&&
B->succ_size() == 144
) {
1299
44
        B = *B->succ_begin();
1300
44
        Term = B->getTerminatorStmt();
1301
44
      }
1302
71
      if (!(B->empty() && 
Term6
&&
isa<BreakStmt>(Term)6
)) {
1303
65
        Preprocessor &PP = S.getPreprocessor();
1304
65
        StringRef AnnotationSpelling = getFallthroughAttrSpelling(PP, L);
1305
65
        SmallString<64> TextToInsert(AnnotationSpelling);
1306
65
        TextToInsert += "; ";
1307
65
        S.Diag(L, diag::note_insert_fallthrough_fixit)
1308
65
            << AnnotationSpelling
1309
65
            << FixItHint::CreateInsertion(L, TextToInsert);
1310
65
      }
1311
71
      S.Diag(L, diag::note_insert_break_fixit)
1312
71
          << FixItHint::CreateInsertion(L, "break; ");
1313
71
    }
1314
82
  }
1315
1316
78
  for (const auto *F : FM.getFallthroughStmts())
1317
16
    S.Diag(F->getBeginLoc(), diag::err_fallthrough_attr_invalid_placement);
1318
78
}
1319
1320
static bool isInLoop(const ASTContext &Ctx, const ParentMap &PM,
1321
69
                     const Stmt *S) {
1322
69
  assert(S);
1323
1324
417
  do {
1325
417
    switch (S->getStmtClass()) {
1326
2
    case Stmt::ForStmtClass:
1327
6
    case Stmt::WhileStmtClass:
1328
8
    case Stmt::CXXForRangeStmtClass:
1329
18
    case Stmt::ObjCForCollectionStmtClass:
1330
18
      return true;
1331
4
    case Stmt::DoStmtClass: {
1332
4
      Expr::EvalResult Result;
1333
4
      if (!cast<DoStmt>(S)->getCond()->EvaluateAsInt(Result, Ctx))
1334
2
        return true;
1335
2
      return Result.Val.getInt().getBoolValue();
1336
4
    }
1337
395
    default:
1338
395
      break;
1339
417
    }
1340
417
  } while (
(S = PM.getParent(S))395
);
1341
1342
47
  return false;
1343
69
}
1344
1345
static void diagnoseRepeatedUseOfWeak(Sema &S,
1346
                                      const sema::FunctionScopeInfo *CurFn,
1347
                                      const Decl *D,
1348
112
                                      const ParentMap &PM) {
1349
112
  typedef sema::FunctionScopeInfo::WeakObjectProfileTy WeakObjectProfileTy;
1350
112
  typedef sema::FunctionScopeInfo::WeakObjectUseMap WeakObjectUseMap;
1351
112
  typedef sema::FunctionScopeInfo::WeakUseVector WeakUseVector;
1352
112
  typedef std::pair<const Stmt *, WeakObjectUseMap::const_iterator>
1353
112
  StmtUsesPair;
1354
1355
112
  ASTContext &Ctx = S.getASTContext();
1356
1357
112
  const WeakObjectUseMap &WeakMap = CurFn->getWeakObjectUses();
1358
1359
  // Extract all weak objects that are referenced more than once.
1360
112
  SmallVector<StmtUsesPair, 8> UsesByStmt;
1361
112
  for (WeakObjectUseMap::const_iterator I = WeakMap.begin(), E = WeakMap.end();
1362
267
       I != E; 
++I155
) {
1363
155
    const WeakUseVector &Uses = I->second;
1364
1365
    // Find the first read of the weak object.
1366
155
    WeakUseVector::const_iterator UI = Uses.begin(), UE = Uses.end();
1367
217
    for ( ; UI != UE; 
++UI62
) {
1368
197
      if (UI->isUnsafe())
1369
135
        break;
1370
197
    }
1371
1372
    // If there were only writes to this object, don't warn.
1373
155
    if (UI == UE)
1374
20
      continue;
1375
1376
    // If there was only one read, followed by any number of writes, and the
1377
    // read is not within a loop, don't warn. Additionally, don't warn in a
1378
    // loop if the base object is a local variable -- local variables are often
1379
    // changed in loops.
1380
135
    if (UI == Uses.begin()) {
1381
117
      WeakUseVector::const_iterator UI2 = UI;
1382
143
      for (++UI2; UI2 != UE; 
++UI226
)
1383
74
        if (UI2->isUnsafe())
1384
48
          break;
1385
1386
117
      if (UI2 == UE) {
1387
69
        if (!isInLoop(Ctx, PM, UI->getUseExpr()))
1388
49
          continue;
1389
1390
20
        const WeakObjectProfileTy &Profile = I->first;
1391
20
        if (!Profile.isExactProfile())
1392
2
          continue;
1393
1394
18
        const NamedDecl *Base = Profile.getBase();
1395
18
        if (!Base)
1396
2
          Base = Profile.getProperty();
1397
18
        assert(Base && "A profile always has a base or property.");
1398
1399
18
        if (const VarDecl *BaseVar = dyn_cast<VarDecl>(Base))
1400
18
          if (BaseVar->hasLocalStorage() && 
!isa<ParmVarDecl>(Base)16
)
1401
2
            continue;
1402
18
      }
1403
117
    }
1404
1405
82
    UsesByStmt.push_back(StmtUsesPair(UI->getUseExpr(), I));
1406
82
  }
1407
1408
112
  if (UsesByStmt.empty())
1409
46
    return;
1410
1411
  // Sort by first use so that we emit the warnings in a deterministic order.
1412
66
  SourceManager &SM = S.getSourceManager();
1413
66
  llvm::sort(UsesByStmt,
1414
66
             [&SM](const StmtUsesPair &LHS, const StmtUsesPair &RHS) {
1415
23
               return SM.isBeforeInTranslationUnit(LHS.first->getBeginLoc(),
1416
23
                                                   RHS.first->getBeginLoc());
1417
23
             });
1418
1419
  // Classify the current code body for better warning text.
1420
  // This enum should stay in sync with the cases in
1421
  // warn_arc_repeated_use_of_weak and warn_arc_possible_repeated_use_of_weak.
1422
  // FIXME: Should we use a common classification enum and the same set of
1423
  // possibilities all throughout Sema?
1424
66
  enum {
1425
66
    Function,
1426
66
    Method,
1427
66
    Block,
1428
66
    Lambda
1429
66
  } FunctionKind;
1430
1431
66
  if (isa<sema::BlockScopeInfo>(CurFn))
1432
2
    FunctionKind = Block;
1433
64
  else if (isa<sema::LambdaScopeInfo>(CurFn))
1434
0
    FunctionKind = Lambda;
1435
64
  else if (isa<ObjCMethodDecl>(D))
1436
6
    FunctionKind = Method;
1437
58
  else
1438
58
    FunctionKind = Function;
1439
1440
  // Iterate through the sorted problems and emit warnings for each.
1441
82
  for (const auto &P : UsesByStmt) {
1442
82
    const Stmt *FirstRead = P.first;
1443
82
    const WeakObjectProfileTy &Key = P.second->first;
1444
82
    const WeakUseVector &Uses = P.second->second;
1445
1446
    // For complicated expressions like 'a.b.c' and 'x.b.c', WeakObjectProfileTy
1447
    // may not contain enough information to determine that these are different
1448
    // properties. We can only be 100% sure of a repeated use in certain cases,
1449
    // and we adjust the diagnostic kind accordingly so that the less certain
1450
    // case can be turned off if it is too noisy.
1451
82
    unsigned DiagKind;
1452
82
    if (Key.isExactProfile())
1453
68
      DiagKind = diag::warn_arc_repeated_use_of_weak;
1454
14
    else
1455
14
      DiagKind = diag::warn_arc_possible_repeated_use_of_weak;
1456
1457
    // Classify the weak object being accessed for better warning text.
1458
    // This enum should stay in sync with the cases in
1459
    // warn_arc_repeated_use_of_weak and warn_arc_possible_repeated_use_of_weak.
1460
82
    enum {
1461
82
      Variable,
1462
82
      Property,
1463
82
      ImplicitProperty,
1464
82
      Ivar
1465
82
    } ObjectKind;
1466
1467
82
    const NamedDecl *KeyProp = Key.getProperty();
1468
82
    if (isa<VarDecl>(KeyProp))
1469
6
      ObjectKind = Variable;
1470
76
    else if (isa<ObjCPropertyDecl>(KeyProp))
1471
64
      ObjectKind = Property;
1472
12
    else if (isa<ObjCMethodDecl>(KeyProp))
1473
4
      ObjectKind = ImplicitProperty;
1474
8
    else if (isa<ObjCIvarDecl>(KeyProp))
1475
8
      ObjectKind = Ivar;
1476
0
    else
1477
0
      llvm_unreachable("Unexpected weak object kind!");
1478
1479
    // Do not warn about IBOutlet weak property receivers being set to null
1480
    // since they are typically only used from the main thread.
1481
82
    if (const ObjCPropertyDecl *Prop = dyn_cast<ObjCPropertyDecl>(KeyProp))
1482
64
      if (Prop->hasAttr<IBOutletAttr>())
1483
4
        continue;
1484
1485
    // Show the first time the object was read.
1486
78
    S.Diag(FirstRead->getBeginLoc(), DiagKind)
1487
78
        << int(ObjectKind) << KeyProp << int(FunctionKind)
1488
78
        << FirstRead->getSourceRange();
1489
1490
    // Print all the other accesses as notes.
1491
164
    for (const auto &Use : Uses) {
1492
164
      if (Use.getUseExpr() == FirstRead)
1493
78
        continue;
1494
86
      S.Diag(Use.getUseExpr()->getBeginLoc(),
1495
86
             diag::note_arc_weak_also_accessed_here)
1496
86
          << Use.getUseExpr()->getSourceRange();
1497
86
    }
1498
78
  }
1499
66
}
1500
1501
namespace clang {
1502
namespace {
1503
typedef SmallVector<PartialDiagnosticAt, 1> OptionalNotes;
1504
typedef std::pair<PartialDiagnosticAt, OptionalNotes> DelayedDiag;
1505
typedef std::list<DelayedDiag> DiagList;
1506
1507
struct SortDiagBySourceLocation {
1508
  SourceManager &SM;
1509
2.34k
  SortDiagBySourceLocation(SourceManager &SM) : SM(SM) {}
1510
1511
2.55k
  bool operator()(const DelayedDiag &left, const DelayedDiag &right) {
1512
    // Although this call will be slow, this is only called when outputting
1513
    // multiple warnings.
1514
2.55k
    return SM.isBeforeInTranslationUnit(left.first.first, right.first.first);
1515
2.55k
  }
1516
};
1517
} // anonymous namespace
1518
} // namespace clang
1519
1520
namespace {
1521
class UninitValsDiagReporter : public UninitVariablesHandler {
1522
  Sema &S;
1523
  typedef SmallVector<UninitUse, 2> UsesVec;
1524
  typedef llvm::PointerIntPair<UsesVec *, 1, bool> MappedType;
1525
  // Prefer using MapVector to DenseMap, so that iteration order will be
1526
  // the same as insertion order. This is needed to obtain a deterministic
1527
  // order of diagnostics when calling flushDiagnostics().
1528
  typedef llvm::MapVector<const VarDecl *, MappedType> UsesMap;
1529
  UsesMap uses;
1530
  UsesMap constRefUses;
1531
1532
public:
1533
54.4k
  UninitValsDiagReporter(Sema &S) : S(S) {}
1534
54.4k
  ~UninitValsDiagReporter() override { flushDiagnostics(); }
1535
1536
2.17k
  MappedType &getUses(UsesMap &um, const VarDecl *vd) {
1537
2.17k
    MappedType &V = um[vd];
1538
2.17k
    if (!V.getPointer())
1539
1.14k
      V.setPointer(new UsesVec());
1540
2.17k
    return V;
1541
2.17k
  }
1542
1543
  void handleUseOfUninitVariable(const VarDecl *vd,
1544
2.14k
                                 const UninitUse &use) override {
1545
2.14k
    getUses(uses, vd).getPointer()->push_back(use);
1546
2.14k
  }
1547
1548
  void handleConstRefUseOfUninitVariable(const VarDecl *vd,
1549
15
                                         const UninitUse &use) override {
1550
15
    getUses(constRefUses, vd).getPointer()->push_back(use);
1551
15
  }
1552
1553
9
  void handleSelfInit(const VarDecl *vd) override {
1554
9
    getUses(uses, vd).setInt(true);
1555
9
    getUses(constRefUses, vd).setInt(true);
1556
9
  }
1557
1558
54.4k
  void flushDiagnostics() {
1559
54.4k
    for (const auto &P : uses) {
1560
1.12k
      const VarDecl *vd = P.first;
1561
1.12k
      const MappedType &V = P.second;
1562
1563
1.12k
      UsesVec *vec = V.getPointer();
1564
1.12k
      bool hasSelfInit = V.getInt();
1565
1566
      // Specially handle the case where we have uses of an uninitialized
1567
      // variable, but the root cause is an idiomatic self-init.  We want
1568
      // to report the diagnostic at the self-init since that is the root cause.
1569
1.12k
      if (!vec->empty() && 
hasSelfInit1.11k
&&
hasAlwaysUninitializedUse(vec)4
)
1570
2
        DiagnoseUninitializedUse(S, vd,
1571
2
                                 UninitUse(vd->getInit()->IgnoreParenCasts(),
1572
2
                                           /* isAlwaysUninit */ true),
1573
2
                                 /* alwaysReportSelfInit */ true);
1574
1.11k
      else {
1575
        // Sort the uses by their SourceLocations.  While not strictly
1576
        // guaranteed to produce them in line/column order, this will provide
1577
        // a stable ordering.
1578
1.11k
        llvm::sort(vec->begin(), vec->end(),
1579
1.11k
                   [](const UninitUse &a, const UninitUse &b) {
1580
          // Prefer a more confident report over a less confident one.
1581
1.02k
          if (a.getKind() != b.getKind())
1582
5
            return a.getKind() > b.getKind();
1583
1.02k
          return a.getUser()->getBeginLoc() < b.getUser()->getBeginLoc();
1584
1.02k
        });
1585
1586
1.11k
        for (const auto &U : *vec) {
1587
          // If we have self-init, downgrade all uses to 'may be uninitialized'.
1588
1.11k
          UninitUse Use = hasSelfInit ? 
UninitUse(U.getUser(), false)2
:
U1.11k
;
1589
1590
1.11k
          if (DiagnoseUninitializedUse(S, vd, Use))
1591
            // Skip further diagnostics for this variable. We try to warn only
1592
            // on the first point at which a variable is used uninitialized.
1593
1.11k
            break;
1594
1.11k
        }
1595
1.11k
      }
1596
1597
      // Release the uses vector.
1598
1.12k
      delete vec;
1599
1.12k
    }
1600
1601
54.4k
    uses.clear();
1602
1603
    // Flush all const reference uses diags.
1604
54.4k
    for (const auto &P : constRefUses) {
1605
23
      const VarDecl *vd = P.first;
1606
23
      const MappedType &V = P.second;
1607
1608
23
      UsesVec *vec = V.getPointer();
1609
23
      bool hasSelfInit = V.getInt();
1610
1611
23
      if (!vec->empty() && 
hasSelfInit15
&&
hasAlwaysUninitializedUse(vec)1
)
1612
1
        DiagnoseUninitializedUse(S, vd,
1613
1
                                 UninitUse(vd->getInit()->IgnoreParenCasts(),
1614
1
                                           /* isAlwaysUninit */ true),
1615
1
                                 /* alwaysReportSelfInit */ true);
1616
22
      else {
1617
22
        for (const auto &U : *vec) {
1618
14
          if (DiagnoseUninitializedConstRefUse(S, vd, U))
1619
14
            break;
1620
14
        }
1621
22
      }
1622
1623
      // Release the uses vector.
1624
23
      delete vec;
1625
23
    }
1626
1627
54.4k
    constRefUses.clear();
1628
54.4k
  }
1629
1630
private:
1631
5
  static bool hasAlwaysUninitializedUse(const UsesVec* vec) {
1632
5
    return llvm::any_of(*vec, [](const UninitUse &U) {
1633
5
      return U.getKind() == UninitUse::Always ||
1634
5
             
U.getKind() == UninitUse::AfterCall2
||
1635
5
             
U.getKind() == UninitUse::AfterDecl2
;
1636
5
    });
1637
5
  }
1638
};
1639
1640
/// Inter-procedural data for the called-once checker.
1641
class CalledOnceInterProceduralData {
1642
public:
1643
  // Add the delayed warning for the given block.
1644
  void addDelayedWarning(const BlockDecl *Block,
1645
8
                         PartialDiagnosticAt &&Warning) {
1646
8
    DelayedBlockWarnings[Block].emplace_back(std::move(Warning));
1647
8
  }
1648
  // Report all of the warnings we've gathered for the given block.
1649
9
  void flushWarnings(const BlockDecl *Block, Sema &S) {
1650
9
    for (const PartialDiagnosticAt &Delayed : DelayedBlockWarnings[Block])
1651
5
      S.Diag(Delayed.first, Delayed.second);
1652
1653
9
    discardWarnings(Block);
1654
9
  }
1655
  // Discard all of the warnings we've gathered for the given block.
1656
16
  void discardWarnings(const BlockDecl *Block) {
1657
16
    DelayedBlockWarnings.erase(Block);
1658
16
  }
1659
1660
private:
1661
  using DelayedDiagnostics = SmallVector<PartialDiagnosticAt, 2>;
1662
  llvm::DenseMap<const BlockDecl *, DelayedDiagnostics> DelayedBlockWarnings;
1663
};
1664
1665
class CalledOnceCheckReporter : public CalledOnceCheckHandler {
1666
public:
1667
  CalledOnceCheckReporter(Sema &S, CalledOnceInterProceduralData &Data)
1668
10.5k
      : S(S), Data(Data) {}
1669
  void handleDoubleCall(const ParmVarDecl *Parameter, const Expr *Call,
1670
                        const Expr *PrevCall, bool IsCompletionHandler,
1671
29
                        bool Poised) override {
1672
29
    auto DiagToReport = IsCompletionHandler
1673
29
                            ? 
diag::warn_completion_handler_called_twice7
1674
29
                            : 
diag::warn_called_once_gets_called_twice22
;
1675
29
    S.Diag(Call->getBeginLoc(), DiagToReport) << Parameter;
1676
29
    S.Diag(PrevCall->getBeginLoc(), diag::note_called_once_gets_called_twice)
1677
29
        << Poised;
1678
29
  }
1679
1680
  void handleNeverCalled(const ParmVarDecl *Parameter,
1681
10
                         bool IsCompletionHandler) override {
1682
10
    auto DiagToReport = IsCompletionHandler
1683
10
                            ? 
diag::warn_completion_handler_never_called4
1684
10
                            : 
diag::warn_called_once_never_called6
;
1685
10
    S.Diag(Parameter->getBeginLoc(), DiagToReport)
1686
10
        << Parameter << /* Captured */ false;
1687
10
  }
1688
1689
  void handleNeverCalled(const ParmVarDecl *Parameter, const Decl *Function,
1690
                         const Stmt *Where, NeverCalledReason Reason,
1691
                         bool IsCalledDirectly,
1692
43
                         bool IsCompletionHandler) override {
1693
43
    auto DiagToReport = IsCompletionHandler
1694
43
                            ? 
diag::warn_completion_handler_never_called_when19
1695
43
                            : 
diag::warn_called_once_never_called_when24
;
1696
43
    PartialDiagnosticAt Warning(Where->getBeginLoc(), S.PDiag(DiagToReport)
1697
43
                                                          << Parameter
1698
43
                                                          << IsCalledDirectly
1699
43
                                                          << (unsigned)Reason);
1700
1701
43
    if (const auto *Block = dyn_cast<BlockDecl>(Function)) {
1702
      // We shouldn't report these warnings on blocks immediately
1703
8
      Data.addDelayedWarning(Block, std::move(Warning));
1704
35
    } else {
1705
35
      S.Diag(Warning.first, Warning.second);
1706
35
    }
1707
43
  }
1708
1709
  void handleCapturedNeverCalled(const ParmVarDecl *Parameter,
1710
                                 const Decl *Where,
1711
0
                                 bool IsCompletionHandler) override {
1712
0
    auto DiagToReport = IsCompletionHandler
1713
0
                            ? diag::warn_completion_handler_never_called
1714
0
                            : diag::warn_called_once_never_called;
1715
0
    S.Diag(Where->getBeginLoc(), DiagToReport)
1716
0
        << Parameter << /* Captured */ true;
1717
0
  }
1718
1719
  void
1720
9
  handleBlockThatIsGuaranteedToBeCalledOnce(const BlockDecl *Block) override {
1721
9
    Data.flushWarnings(Block, S);
1722
9
  }
1723
1724
7
  void handleBlockWithNoGuarantees(const BlockDecl *Block) override {
1725
7
    Data.discardWarnings(Block);
1726
7
  }
1727
1728
private:
1729
  Sema &S;
1730
  CalledOnceInterProceduralData &Data;
1731
};
1732
1733
constexpr unsigned CalledOnceWarnings[] = {
1734
    diag::warn_called_once_never_called,
1735
    diag::warn_called_once_never_called_when,
1736
    diag::warn_called_once_gets_called_twice};
1737
1738
constexpr unsigned CompletionHandlerWarnings[]{
1739
    diag::warn_completion_handler_never_called,
1740
    diag::warn_completion_handler_never_called_when,
1741
    diag::warn_completion_handler_called_twice};
1742
1743
bool shouldAnalyzeCalledOnceImpl(llvm::ArrayRef<unsigned> DiagIDs,
1744
                                 const DiagnosticsEngine &Diags,
1745
21.1k
                                 SourceLocation At) {
1746
41.9k
  return llvm::any_of(DiagIDs, [&Diags, At](unsigned DiagID) {
1747
41.9k
    return !Diags.isIgnored(DiagID, At);
1748
41.9k
  });
1749
21.1k
}
1750
1751
bool shouldAnalyzeCalledOnceConventions(const DiagnosticsEngine &Diags,
1752
10.5k
                                        SourceLocation At) {
1753
10.5k
  return shouldAnalyzeCalledOnceImpl(CompletionHandlerWarnings, Diags, At);
1754
10.5k
}
1755
1756
bool shouldAnalyzeCalledOnceParameters(const DiagnosticsEngine &Diags,
1757
10.5k
                                       SourceLocation At) {
1758
10.5k
  return shouldAnalyzeCalledOnceImpl(CalledOnceWarnings, Diags, At) ||
1759
10.5k
         
shouldAnalyzeCalledOnceConventions(Diags, At)30
;
1760
10.5k
}
1761
} // anonymous namespace
1762
1763
//===----------------------------------------------------------------------===//
1764
// -Wthread-safety
1765
//===----------------------------------------------------------------------===//
1766
namespace clang {
1767
namespace threadSafety {
1768
namespace {
1769
class ThreadSafetyReporter : public clang::threadSafety::ThreadSafetyHandler {
1770
  Sema &S;
1771
  DiagList Warnings;
1772
  SourceLocation FunLocation, FunEndLocation;
1773
1774
  const FunctionDecl *CurrentFunction;
1775
  bool Verbose;
1776
1777
2.50k
  OptionalNotes getNotes() const {
1778
2.50k
    if (Verbose && 
CurrentFunction14
) {
1779
14
      PartialDiagnosticAt FNote(CurrentFunction->getBody()->getBeginLoc(),
1780
14
                                S.PDiag(diag::note_thread_warning_in_fun)
1781
14
                                    << CurrentFunction);
1782
14
      return OptionalNotes(1, FNote);
1783
14
    }
1784
2.49k
    return OptionalNotes();
1785
2.50k
  }
1786
1787
450
  OptionalNotes getNotes(const PartialDiagnosticAt &Note) const {
1788
450
    OptionalNotes ONS(1, Note);
1789
450
    if (Verbose && 
CurrentFunction6
) {
1790
6
      PartialDiagnosticAt FNote(CurrentFunction->getBody()->getBeginLoc(),
1791
6
                                S.PDiag(diag::note_thread_warning_in_fun)
1792
6
                                    << CurrentFunction);
1793
6
      ONS.push_back(std::move(FNote));
1794
6
    }
1795
450
    return ONS;
1796
450
  }
1797
1798
  OptionalNotes getNotes(const PartialDiagnosticAt &Note1,
1799
0
                         const PartialDiagnosticAt &Note2) const {
1800
0
    OptionalNotes ONS;
1801
0
    ONS.push_back(Note1);
1802
0
    ONS.push_back(Note2);
1803
0
    if (Verbose && CurrentFunction) {
1804
0
      PartialDiagnosticAt FNote(CurrentFunction->getBody()->getBeginLoc(),
1805
0
                                S.PDiag(diag::note_thread_warning_in_fun)
1806
0
                                    << CurrentFunction);
1807
0
      ONS.push_back(std::move(FNote));
1808
0
    }
1809
0
    return ONS;
1810
0
  }
1811
1812
278
  OptionalNotes makeLockedHereNote(SourceLocation LocLocked, StringRef Kind) {
1813
278
    return LocLocked.isValid()
1814
278
               ? getNotes(PartialDiagnosticAt(
1815
278
                     LocLocked, S.PDiag(diag::note_locked_here) << Kind))
1816
278
               : 
getNotes()0
;
1817
278
  }
1818
1819
  OptionalNotes makeUnlockedHereNote(SourceLocation LocUnlocked,
1820
106
                                     StringRef Kind) {
1821
106
    return LocUnlocked.isValid()
1822
106
               ? getNotes(PartialDiagnosticAt(
1823
54
                     LocUnlocked, S.PDiag(diag::note_unlocked_here) << Kind))
1824
106
               : 
getNotes()52
;
1825
106
  }
1826
1827
 public:
1828
  ThreadSafetyReporter(Sema &S, SourceLocation FL, SourceLocation FEL)
1829
    : S(S), FunLocation(FL), FunEndLocation(FEL),
1830
2.24k
      CurrentFunction(nullptr), Verbose(false) {}
1831
1832
41
  void setVerbose(bool b) { Verbose = b; }
1833
1834
  /// Emit all buffered diagnostics in order of sourcelocation.
1835
  /// We need to output diagnostics produced while iterating through
1836
  /// the lockset in deterministic order, so this function orders diagnostics
1837
  /// and outputs them.
1838
2.24k
  void emitDiagnostics() {
1839
2.24k
    Warnings.sort(SortDiagBySourceLocation(S.getSourceManager()));
1840
2.95k
    for (const auto &Diag : Warnings) {
1841
2.95k
      S.Diag(Diag.first.first, Diag.first.second);
1842
2.95k
      for (const auto &Note : Diag.second)
1843
470
        S.Diag(Note.first, Note.second);
1844
2.95k
    }
1845
2.24k
  }
1846
1847
0
  void handleInvalidLockExp(StringRef Kind, SourceLocation Loc) override {
1848
0
    PartialDiagnosticAt Warning(Loc, S.PDiag(diag::warn_cannot_resolve_lock)
1849
0
                                         << Loc);
1850
0
    Warnings.emplace_back(std::move(Warning), getNotes());
1851
0
  }
1852
1853
  void handleUnmatchedUnlock(StringRef Kind, Name LockName, SourceLocation Loc,
1854
106
                             SourceLocation LocPreviousUnlock) override {
1855
106
    if (Loc.isInvalid())
1856
0
      Loc = FunLocation;
1857
106
    PartialDiagnosticAt Warning(Loc, S.PDiag(diag::warn_unlock_but_no_lock)
1858
106
                                         << Kind << LockName);
1859
106
    Warnings.emplace_back(std::move(Warning),
1860
106
                          makeUnlockedHereNote(LocPreviousUnlock, Kind));
1861
106
  }
1862
1863
  void handleIncorrectUnlockKind(StringRef Kind, Name LockName,
1864
                                 LockKind Expected, LockKind Received,
1865
                                 SourceLocation LocLocked,
1866
18
                                 SourceLocation LocUnlock) override {
1867
18
    if (LocUnlock.isInvalid())
1868
0
      LocUnlock = FunLocation;
1869
18
    PartialDiagnosticAt Warning(
1870
18
        LocUnlock, S.PDiag(diag::warn_unlock_kind_mismatch)
1871
18
                       << Kind << LockName << Received << Expected);
1872
18
    Warnings.emplace_back(std::move(Warning),
1873
18
                          makeLockedHereNote(LocLocked, Kind));
1874
18
  }
1875
1876
  void handleDoubleLock(StringRef Kind, Name LockName, SourceLocation LocLocked,
1877
79
                        SourceLocation LocDoubleLock) override {
1878
79
    if (LocDoubleLock.isInvalid())
1879
0
      LocDoubleLock = FunLocation;
1880
79
    PartialDiagnosticAt Warning(LocDoubleLock, S.PDiag(diag::warn_double_lock)
1881
79
                                                   << Kind << LockName);
1882
79
    Warnings.emplace_back(std::move(Warning),
1883
79
                          makeLockedHereNote(LocLocked, Kind));
1884
79
  }
1885
1886
  void handleMutexHeldEndOfScope(StringRef Kind, Name LockName,
1887
                                 SourceLocation LocLocked,
1888
                                 SourceLocation LocEndOfScope,
1889
181
                                 LockErrorKind LEK) override {
1890
181
    unsigned DiagID = 0;
1891
181
    switch (LEK) {
1892
42
      case LEK_LockedSomePredecessors:
1893
42
        DiagID = diag::warn_lock_some_predecessors;
1894
42
        break;
1895
42
      case LEK_LockedSomeLoopIterations:
1896
42
        DiagID = diag::warn_expecting_lock_held_on_loop;
1897
42
        break;
1898
65
      case LEK_LockedAtEndOfFunction:
1899
65
        DiagID = diag::warn_no_unlock;
1900
65
        break;
1901
32
      case LEK_NotLockedAtEndOfFunction:
1902
32
        DiagID = diag::warn_expecting_locked;
1903
32
        break;
1904
181
    }
1905
181
    if (LocEndOfScope.isInvalid())
1906
127
      LocEndOfScope = FunEndLocation;
1907
1908
181
    PartialDiagnosticAt Warning(LocEndOfScope, S.PDiag(DiagID) << Kind
1909
181
                                                               << LockName);
1910
181
    Warnings.emplace_back(std::move(Warning),
1911
181
                          makeLockedHereNote(LocLocked, Kind));
1912
181
  }
1913
1914
  void handleExclusiveAndShared(StringRef Kind, Name LockName,
1915
                                SourceLocation Loc1,
1916
36
                                SourceLocation Loc2) override {
1917
36
    PartialDiagnosticAt Warning(Loc1,
1918
36
                                S.PDiag(diag::warn_lock_exclusive_and_shared)
1919
36
                                    << Kind << LockName);
1920
36
    PartialDiagnosticAt Note(Loc2, S.PDiag(diag::note_lock_exclusive_and_shared)
1921
36
                                       << Kind << LockName);
1922
36
    Warnings.emplace_back(std::move(Warning), getNotes(Note));
1923
36
  }
1924
1925
  void handleNoMutexHeld(StringRef Kind, const NamedDecl *D,
1926
                         ProtectedOperationKind POK, AccessKind AK,
1927
34
                         SourceLocation Loc) override {
1928
34
    assert((POK == POK_VarAccess || POK == POK_VarDereference) &&
1929
34
           "Only works for variables");
1930
34
    unsigned DiagID = POK == POK_VarAccess?
1931
25
                        diag::warn_variable_requires_any_lock:
1932
34
                        
diag::warn_var_deref_requires_any_lock9
;
1933
34
    PartialDiagnosticAt Warning(Loc, S.PDiag(DiagID)
1934
34
      << D << getLockKindFromAccessKind(AK));
1935
34
    Warnings.emplace_back(std::move(Warning), getNotes());
1936
34
  }
1937
1938
  void handleMutexNotHeld(StringRef Kind, const NamedDecl *D,
1939
                          ProtectedOperationKind POK, Name LockName,
1940
                          LockKind LK, SourceLocation Loc,
1941
1.22k
                          Name *PossibleMatch) override {
1942
1.22k
    unsigned DiagID = 0;
1943
1.22k
    if (PossibleMatch) {
1944
76
      switch (POK) {
1945
56
        case POK_VarAccess:
1946
56
          DiagID = diag::warn_variable_requires_lock_precise;
1947
56
          break;
1948
0
        case POK_VarDereference:
1949
0
          DiagID = diag::warn_var_deref_requires_lock_precise;
1950
0
          break;
1951
20
        case POK_FunctionCall:
1952
20
          DiagID = diag::warn_fun_requires_lock_precise;
1953
20
          break;
1954
0
        case POK_PassByRef:
1955
0
          DiagID = diag::warn_guarded_pass_by_reference;
1956
0
          break;
1957
0
        case POK_PtPassByRef:
1958
0
          DiagID = diag::warn_pt_guarded_pass_by_reference;
1959
0
          break;
1960
76
      }
1961
76
      PartialDiagnosticAt Warning(Loc, S.PDiag(DiagID) << Kind
1962
76
                                                       << D
1963
76
                                                       << LockName << LK);
1964
76
      PartialDiagnosticAt Note(Loc, S.PDiag(diag::note_found_mutex_near_match)
1965
76
                                        << *PossibleMatch);
1966
76
      if (Verbose && 
POK == POK_VarAccess0
) {
1967
0
        PartialDiagnosticAt VNote(D->getLocation(),
1968
0
                                  S.PDiag(diag::note_guarded_by_declared_here)
1969
0
                                      << D->getDeclName());
1970
0
        Warnings.emplace_back(std::move(Warning), getNotes(Note, VNote));
1971
0
      } else
1972
76
        Warnings.emplace_back(std::move(Warning), getNotes(Note));
1973
1.14k
    } else {
1974
1.14k
      switch (POK) {
1975
630
        case POK_VarAccess:
1976
630
          DiagID = diag::warn_variable_requires_lock;
1977
630
          break;
1978
204
        case POK_VarDereference:
1979
204
          DiagID = diag::warn_var_deref_requires_lock;
1980
204
          break;
1981
206
        case POK_FunctionCall:
1982
206
          DiagID = diag::warn_fun_requires_lock;
1983
206
          break;
1984
88
        case POK_PassByRef:
1985
88
          DiagID = diag::warn_guarded_pass_by_reference;
1986
88
          break;
1987
20
        case POK_PtPassByRef:
1988
20
          DiagID = diag::warn_pt_guarded_pass_by_reference;
1989
20
          break;
1990
1.14k
      }
1991
1.14k
      PartialDiagnosticAt Warning(Loc, S.PDiag(DiagID) << Kind
1992
1.14k
                                                       << D
1993
1.14k
                                                       << LockName << LK);
1994
1.14k
      if (Verbose && 
POK == POK_VarAccess12
) {
1995
6
        PartialDiagnosticAt Note(D->getLocation(),
1996
6
                                 S.PDiag(diag::note_guarded_by_declared_here));
1997
6
        Warnings.emplace_back(std::move(Warning), getNotes(Note));
1998
6
      } else
1999
1.14k
        Warnings.emplace_back(std::move(Warning), getNotes());
2000
1.14k
    }
2001
1.22k
  }
2002
2003
  void handleNegativeNotHeld(StringRef Kind, Name LockName, Name Neg,
2004
1.11k
                             SourceLocation Loc) override {
2005
1.11k
    PartialDiagnosticAt Warning(Loc,
2006
1.11k
        S.PDiag(diag::warn_acquire_requires_negative_cap)
2007
1.11k
        << Kind << LockName << Neg);
2008
1.11k
    Warnings.emplace_back(std::move(Warning), getNotes());
2009
1.11k
  }
2010
2011
  void handleNegativeNotHeld(const NamedDecl *D, Name LockName,
2012
14
                             SourceLocation Loc) override {
2013
14
    PartialDiagnosticAt Warning(
2014
14
        Loc, S.PDiag(diag::warn_fun_requires_negative_cap) << D << LockName);
2015
14
    Warnings.emplace_back(std::move(Warning), getNotes());
2016
14
  }
2017
2018
  void handleFunExcludesLock(StringRef Kind, Name FunName, Name LockName,
2019
77
                             SourceLocation Loc) override {
2020
77
    PartialDiagnosticAt Warning(Loc, S.PDiag(diag::warn_fun_excludes_mutex)
2021
77
                                         << Kind << FunName << LockName);
2022
77
    Warnings.emplace_back(std::move(Warning), getNotes());
2023
77
  }
2024
2025
  void handleLockAcquiredBefore(StringRef Kind, Name L1Name, Name L2Name,
2026
49
                                SourceLocation Loc) override {
2027
49
    PartialDiagnosticAt Warning(Loc,
2028
49
      S.PDiag(diag::warn_acquired_before) << Kind << L1Name << L2Name);
2029
49
    Warnings.emplace_back(std::move(Warning), getNotes());
2030
49
  }
2031
2032
20
  void handleBeforeAfterCycle(Name L1Name, SourceLocation Loc) override {
2033
20
    PartialDiagnosticAt Warning(Loc,
2034
20
      S.PDiag(diag::warn_acquired_before_after_cycle) << L1Name);
2035
20
    Warnings.emplace_back(std::move(Warning), getNotes());
2036
20
  }
2037
2038
2.13k
  void enterFunction(const FunctionDecl* FD) override {
2039
2.13k
    CurrentFunction = FD;
2040
2.13k
  }
2041
2042
2.06k
  void leaveFunction(const FunctionDecl* FD) override {
2043
2.06k
    CurrentFunction = nullptr;
2044
2.06k
  }
2045
};
2046
} // anonymous namespace
2047
} // namespace threadSafety
2048
} // namespace clang
2049
2050
//===----------------------------------------------------------------------===//
2051
// -Wconsumed
2052
//===----------------------------------------------------------------------===//
2053
2054
namespace clang {
2055
namespace consumed {
2056
namespace {
2057
class ConsumedWarningsHandler : public ConsumedWarningsHandlerBase {
2058
2059
  Sema &S;
2060
  DiagList Warnings;
2061
2062
public:
2063
2064
97
  ConsumedWarningsHandler(Sema &S) : S(S) {}
2065
2066
94
  void emitDiagnostics() override {
2067
94
    Warnings.sort(SortDiagBySourceLocation(S.getSourceManager()));
2068
110
    for (const auto &Diag : Warnings) {
2069
110
      S.Diag(Diag.first.first, Diag.first.second);
2070
110
      for (const auto &Note : Diag.second)
2071
0
        S.Diag(Note.first, Note.second);
2072
110
    }
2073
94
  }
2074
2075
  void warnLoopStateMismatch(SourceLocation Loc,
2076
2
                             StringRef VariableName) override {
2077
2
    PartialDiagnosticAt Warning(Loc, S.PDiag(diag::warn_loop_state_mismatch) <<
2078
2
      VariableName);
2079
2080
2
    Warnings.emplace_back(std::move(Warning), OptionalNotes());
2081
2
  }
2082
2083
  void warnParamReturnTypestateMismatch(SourceLocation Loc,
2084
                                        StringRef VariableName,
2085
                                        StringRef ExpectedState,
2086
2
                                        StringRef ObservedState) override {
2087
2088
2
    PartialDiagnosticAt Warning(Loc, S.PDiag(
2089
2
      diag::warn_param_return_typestate_mismatch) << VariableName <<
2090
2
        ExpectedState << ObservedState);
2091
2092
2
    Warnings.emplace_back(std::move(Warning), OptionalNotes());
2093
2
  }
2094
2095
  void warnParamTypestateMismatch(SourceLocation Loc, StringRef ExpectedState,
2096
6
                                  StringRef ObservedState) override {
2097
2098
6
    PartialDiagnosticAt Warning(Loc, S.PDiag(
2099
6
      diag::warn_param_typestate_mismatch) << ExpectedState << ObservedState);
2100
2101
6
    Warnings.emplace_back(std::move(Warning), OptionalNotes());
2102
6
  }
2103
2104
  void warnReturnTypestateForUnconsumableType(SourceLocation Loc,
2105
1
                                              StringRef TypeName) override {
2106
1
    PartialDiagnosticAt Warning(Loc, S.PDiag(
2107
1
      diag::warn_return_typestate_for_unconsumable_type) << TypeName);
2108
2109
1
    Warnings.emplace_back(std::move(Warning), OptionalNotes());
2110
1
  }
2111
2112
  void warnReturnTypestateMismatch(SourceLocation Loc, StringRef ExpectedState,
2113
1
                                   StringRef ObservedState) override {
2114
2115
1
    PartialDiagnosticAt Warning(Loc, S.PDiag(
2116
1
      diag::warn_return_typestate_mismatch) << ExpectedState << ObservedState);
2117
2118
1
    Warnings.emplace_back(std::move(Warning), OptionalNotes());
2119
1
  }
2120
2121
  void warnUseOfTempInInvalidState(StringRef MethodName, StringRef State,
2122
4
                                   SourceLocation Loc) override {
2123
2124
4
    PartialDiagnosticAt Warning(Loc, S.PDiag(
2125
4
      diag::warn_use_of_temp_in_invalid_state) << MethodName << State);
2126
2127
4
    Warnings.emplace_back(std::move(Warning), OptionalNotes());
2128
4
  }
2129
2130
  void warnUseInInvalidState(StringRef MethodName, StringRef VariableName,
2131
94
                             StringRef State, SourceLocation Loc) override {
2132
2133
94
    PartialDiagnosticAt Warning(Loc, S.PDiag(diag::warn_use_in_invalid_state) <<
2134
94
                                MethodName << VariableName << State);
2135
2136
94
    Warnings.emplace_back(std::move(Warning), OptionalNotes());
2137
94
  }
2138
};
2139
} // anonymous namespace
2140
} // namespace consumed
2141
} // namespace clang
2142
2143
//===----------------------------------------------------------------------===//
2144
// AnalysisBasedWarnings - Worker object used by Sema to execute analysis-based
2145
//  warnings on a function, method, or block.
2146
//===----------------------------------------------------------------------===//
2147
2148
85.5k
sema::AnalysisBasedWarnings::Policy::Policy() {
2149
85.5k
  enableCheckFallThrough = 1;
2150
85.5k
  enableCheckUnreachable = 0;
2151
85.5k
  enableThreadSafetyAnalysis = 0;
2152
85.5k
  enableConsumedAnalysis = 0;
2153
85.5k
}
2154
2155
/// InterProceduralData aims to be a storage of whatever data should be passed
2156
/// between analyses of different functions.
2157
///
2158
/// At the moment, its primary goal is to make the information gathered during
2159
/// the analysis of the blocks available during the analysis of the enclosing
2160
/// function.  This is important due to the fact that blocks are analyzed before
2161
/// the enclosed function is even parsed fully, so it is not viable to access
2162
/// anything in the outer scope while analyzing the block.  On the other hand,
2163
/// re-building CFG for blocks and re-analyzing them when we do have all the
2164
/// information (i.e. during the analysis of the enclosing function) seems to be
2165
/// ill-designed.
2166
class sema::AnalysisBasedWarnings::InterProceduralData {
2167
public:
2168
  // It is important to analyze blocks within functions because it's a very
2169
  // common pattern to capture completion handler parameters by blocks.
2170
  CalledOnceInterProceduralData CalledOnceData;
2171
};
2172
2173
513k
static unsigned isEnabled(DiagnosticsEngine &D, unsigned diag) {
2174
513k
  return (unsigned)!D.isIgnored(diag, SourceLocation());
2175
513k
}
2176
2177
sema::AnalysisBasedWarnings::AnalysisBasedWarnings(Sema &s)
2178
    : S(s), IPData(std::make_unique<InterProceduralData>()),
2179
      NumFunctionsAnalyzed(0), NumFunctionsWithBadCFGs(0), NumCFGBlocks(0),
2180
      MaxCFGBlocksPerFunction(0), NumUninitAnalysisFunctions(0),
2181
      NumUninitAnalysisVariables(0), MaxUninitAnalysisVariablesPerFunction(0),
2182
      NumUninitAnalysisBlockVisits(0),
2183
85.5k
      MaxUninitAnalysisBlockVisitsPerFunction(0) {
2184
2185
85.5k
  using namespace diag;
2186
85.5k
  DiagnosticsEngine &D = S.getDiagnostics();
2187
2188
85.5k
  DefaultPolicy.enableCheckUnreachable =
2189
85.5k
      isEnabled(D, warn_unreachable) || 
isEnabled(D, warn_unreachable_break)85.5k
||
2190
85.5k
      
isEnabled(D, warn_unreachable_return)85.5k
||
2191
85.5k
      
isEnabled(D, warn_unreachable_loop_increment)85.5k
;
2192
2193
85.5k
  DefaultPolicy.enableThreadSafetyAnalysis = isEnabled(D, warn_double_lock);
2194
2195
85.5k
  DefaultPolicy.enableConsumedAnalysis =
2196
85.5k
      isEnabled(D, warn_use_in_invalid_state);
2197
85.5k
}
2198
2199
// We need this here for unique_ptr with forward declared class.
2200
80.9k
sema::AnalysisBasedWarnings::~AnalysisBasedWarnings() = default;
2201
2202
627
static void flushDiagnostics(Sema &S, const sema::FunctionScopeInfo *fscope) {
2203
627
  for (const auto &D : fscope->PossiblyUnreachableDiags)
2204
1
    S.Diag(D.Loc, D.PD);
2205
627
}
2206
2207
void clang::sema::AnalysisBasedWarnings::IssueWarnings(
2208
    sema::AnalysisBasedWarnings::Policy P, sema::FunctionScopeInfo *fscope,
2209
3.11M
    const Decl *D, QualType BlockType) {
2210
2211
  // We avoid doing analysis-based warnings when there are errors for
2212
  // two reasons:
2213
  // (1) The CFGs often can't be constructed (if the body is invalid), so
2214
  //     don't bother trying.
2215
  // (2) The code already has problems; running the analysis just takes more
2216
  //     time.
2217
3.11M
  DiagnosticsEngine &Diags = S.getDiagnostics();
2218
2219
  // Do not do any analysis if we are going to just ignore them.
2220
3.11M
  if (Diags.getIgnoreAllWarnings() ||
2221
3.11M
      
(3.03M
Diags.getSuppressSystemWarnings()3.03M
&&
2222
3.03M
       
S.SourceMgr.isInSystemHeader(D->getLocation())3.02M
))
2223
2.84M
    return;
2224
2225
  // For code in dependent contexts, we'll do this at instantiation time.
2226
271k
  if (cast<DeclContext>(D)->isDependentContext())
2227
10.8k
    return;
2228
2229
260k
  if (S.hasUncompilableErrorOccurred()) {
2230
    // Flush out any possibly unreachable diagnostics.
2231
627
    flushDiagnostics(S, fscope);
2232
627
    return;
2233
627
  }
2234
2235
260k
  const Stmt *Body = D->getBody();
2236
260k
  assert(Body);
2237
2238
  // Construct the analysis context with the specified CFG build options.
2239
0
  AnalysisDeclContext AC(/* AnalysisDeclContextManager */ nullptr, D);
2240
2241
  // Don't generate EH edges for CallExprs as we'd like to avoid the n^2
2242
  // explosion for destructors that can result and the compile time hit.
2243
260k
  AC.getCFGBuildOptions().PruneTriviallyFalseEdges = true;
2244
260k
  AC.getCFGBuildOptions().AddEHEdges = false;
2245
260k
  AC.getCFGBuildOptions().AddInitializers = true;
2246
260k
  AC.getCFGBuildOptions().AddImplicitDtors = true;
2247
260k
  AC.getCFGBuildOptions().AddTemporaryDtors = true;
2248
260k
  AC.getCFGBuildOptions().AddCXXNewAllocator = false;
2249
260k
  AC.getCFGBuildOptions().AddCXXDefaultInitExprInCtors = true;
2250
2251
  // Force that certain expressions appear as CFGElements in the CFG.  This
2252
  // is used to speed up various analyses.
2253
  // FIXME: This isn't the right factoring.  This is here for initial
2254
  // prototyping, but we need a way for analyses to say what expressions they
2255
  // expect to always be CFGElements and then fill in the BuildOptions
2256
  // appropriately.  This is essentially a layering violation.
2257
260k
  if (P.enableCheckUnreachable || 
P.enableThreadSafetyAnalysis259k
||
2258
260k
      
P.enableConsumedAnalysis257k
) {
2259
    // Unreachable code analysis and thread safety require a linearized CFG.
2260
2.46k
    AC.getCFGBuildOptions().setAllAlwaysAdd();
2261
2.46k
  }
2262
257k
  else {
2263
257k
    AC.getCFGBuildOptions()
2264
257k
      .setAlwaysAdd(Stmt::BinaryOperatorClass)
2265
257k
      .setAlwaysAdd(Stmt::CompoundAssignOperatorClass)
2266
257k
      .setAlwaysAdd(Stmt::BlockExprClass)
2267
257k
      .setAlwaysAdd(Stmt::CStyleCastExprClass)
2268
257k
      .setAlwaysAdd(Stmt::DeclRefExprClass)
2269
257k
      .setAlwaysAdd(Stmt::ImplicitCastExprClass)
2270
257k
      .setAlwaysAdd(Stmt::UnaryOperatorClass);
2271
257k
  }
2272
2273
  // Install the logical handler.
2274
260k
  llvm::Optional<LogicalErrorHandler> LEH;
2275
260k
  if (LogicalErrorHandler::hasActiveDiagnostics(Diags, D->getBeginLoc())) {
2276
40.1k
    LEH.emplace(S);
2277
40.1k
    AC.getCFGBuildOptions().Observer = &*LEH;
2278
40.1k
  }
2279
2280
  // Emit delayed diagnostics.
2281
260k
  if (!fscope->PossiblyUnreachableDiags.empty()) {
2282
10.6k
    bool analyzed = false;
2283
2284
    // Register the expressions with the CFGBuilder.
2285
15.2k
    for (const auto &D : fscope->PossiblyUnreachableDiags) {
2286
15.2k
      for (const Stmt *S : D.Stmts)
2287
15.5k
        AC.registerForcedBlockExpression(S);
2288
15.2k
    }
2289
2290
10.6k
    if (AC.getCFG()) {
2291
10.6k
      analyzed = true;
2292
15.2k
      for (const auto &D : fscope->PossiblyUnreachableDiags) {
2293
15.2k
        bool AllReachable = true;
2294
15.5k
        for (const Stmt *S : D.Stmts) {
2295
15.5k
          const CFGBlock *block = AC.getBlockForRegisteredExpression(S);
2296
15.5k
          CFGReverseBlockReachabilityAnalysis *cra =
2297
15.5k
              AC.getCFGReachablityAnalysis();
2298
          // FIXME: We should be able to assert that block is non-null, but
2299
          // the CFG analysis can skip potentially-evaluated expressions in
2300
          // edge cases; see test/Sema/vla-2.c.
2301
15.5k
          if (block && 
cra14.4k
) {
2302
            // Can this block be reached from the entrance?
2303
14.4k
            if (!cra->isReachable(&AC.getCFG()->getEntry(), block)) {
2304
104
              AllReachable = false;
2305
104
              break;
2306
104
            }
2307
14.4k
          }
2308
          // If we cannot map to a basic block, assume the statement is
2309
          // reachable.
2310
15.5k
        }
2311
2312
15.2k
        if (AllReachable)
2313
15.1k
          S.Diag(D.Loc, D.PD);
2314
15.2k
      }
2315
10.6k
    }
2316
2317
10.6k
    if (!analyzed)
2318
0
      flushDiagnostics(S, fscope);
2319
10.6k
  }
2320
2321
  // Warning: check missing 'return'
2322
260k
  if (P.enableCheckFallThrough) {
2323
253k
    const CheckFallThroughDiagnostics &CD =
2324
253k
        (isa<BlockDecl>(D)
2325
253k
             ? 
CheckFallThroughDiagnostics::MakeForBlock()2.12k
2326
253k
             : 
(251k
isa<CXXMethodDecl>(D)251k
&&
2327
251k
                
cast<CXXMethodDecl>(D)->getOverloadedOperator() == OO_Call51.2k
&&
2328
251k
                
cast<CXXMethodDecl>(D)->getParent()->isLambda()3.78k
)
2329
251k
                   ? 
CheckFallThroughDiagnostics::MakeForLambda()3.66k
2330
251k
                   : 
(247k
fscope->isCoroutine()247k
2331
247k
                          ? 
CheckFallThroughDiagnostics::MakeForCoroutine(D)176
2332
247k
                          : 
CheckFallThroughDiagnostics::MakeForFunction(D)247k
));
2333
253k
    CheckFallThroughForBody(S, D, Body, BlockType, CD, AC, fscope);
2334
253k
  }
2335
2336
  // Warning: check for unreachable code
2337
260k
  if (P.enableCheckUnreachable) {
2338
    // Only check for unreachable code on non-template instantiations.
2339
    // Different template instantiations can effectively change the control-flow
2340
    // and it is very difficult to prove that a snippet of code in a template
2341
    // is unreachable for all instantiations.
2342
168
    bool isTemplateInstantiation = false;
2343
168
    if (const FunctionDecl *Function = dyn_cast<FunctionDecl>(D))
2344
164
      isTemplateInstantiation = Function->isTemplateInstantiation();
2345
168
    if (!isTemplateInstantiation)
2346
164
      CheckUnreachable(S, AC);
2347
168
  }
2348
2349
  // Check for thread safety violations
2350
260k
  if (P.enableThreadSafetyAnalysis) {
2351
2.24k
    SourceLocation FL = AC.getDecl()->getLocation();
2352
2.24k
    SourceLocation FEL = AC.getDecl()->getEndLoc();
2353
2.24k
    threadSafety::ThreadSafetyReporter Reporter(S, FL, FEL);
2354
2.24k
    if (!Diags.isIgnored(diag::warn_thread_safety_beta, D->getBeginLoc()))
2355
2.17k
      Reporter.setIssueBetaWarnings(true);
2356
2.24k
    if (!Diags.isIgnored(diag::warn_thread_safety_verbose, D->getBeginLoc()))
2357
41
      Reporter.setVerbose(true);
2358
2359
2.24k
    threadSafety::runThreadSafetyAnalysis(AC, Reporter,
2360
2.24k
                                          &S.ThreadSafetyDeclCache);
2361
2.24k
    Reporter.emitDiagnostics();
2362
2.24k
  }
2363
2364
  // Check for violations of consumed properties.
2365
260k
  if (P.enableConsumedAnalysis) {
2366
97
    consumed::ConsumedWarningsHandler WarningHandler(S);
2367
97
    consumed::ConsumedAnalyzer Analyzer(WarningHandler);
2368
97
    Analyzer.run(AC);
2369
97
  }
2370
2371
260k
  if (!Diags.isIgnored(diag::warn_uninit_var, D->getBeginLoc()) ||
2372
260k
      
!Diags.isIgnored(diag::warn_sometimes_uninit_var, D->getBeginLoc())205k
||
2373
260k
      
!Diags.isIgnored(diag::warn_maybe_uninit_var, D->getBeginLoc())205k
||
2374
260k
      
!Diags.isIgnored(diag::warn_uninit_const_reference, D->getBeginLoc())205k
) {
2375
54.4k
    if (CFG *cfg = AC.getCFG()) {
2376
54.4k
      UninitValsDiagReporter reporter(S);
2377
54.4k
      UninitVariablesAnalysisStats stats;
2378
54.4k
      std::memset(&stats, 0, sizeof(UninitVariablesAnalysisStats));
2379
54.4k
      runUninitializedVariablesAnalysis(*cast<DeclContext>(D), *cfg, AC,
2380
54.4k
                                        reporter, stats);
2381
2382
54.4k
      if (S.CollectStats && 
stats.NumVariablesAnalyzed > 00
) {
2383
0
        ++NumUninitAnalysisFunctions;
2384
0
        NumUninitAnalysisVariables += stats.NumVariablesAnalyzed;
2385
0
        NumUninitAnalysisBlockVisits += stats.NumBlockVisits;
2386
0
        MaxUninitAnalysisVariablesPerFunction =
2387
0
            std::max(MaxUninitAnalysisVariablesPerFunction,
2388
0
                     stats.NumVariablesAnalyzed);
2389
0
        MaxUninitAnalysisBlockVisitsPerFunction =
2390
0
            std::max(MaxUninitAnalysisBlockVisitsPerFunction,
2391
0
                     stats.NumBlockVisits);
2392
0
      }
2393
54.4k
    }
2394
54.4k
  }
2395
2396
  // Check for violations of "called once" parameter properties.
2397
260k
  if (S.getLangOpts().ObjC && 
!S.getLangOpts().CPlusPlus28.7k
&&
2398
260k
      
shouldAnalyzeCalledOnceParameters(Diags, D->getBeginLoc())10.5k
) {
2399
10.5k
    if (AC.getCFG()) {
2400
10.5k
      CalledOnceCheckReporter Reporter(S, IPData->CalledOnceData);
2401
10.5k
      checkCalledOnceParameters(
2402
10.5k
          AC, Reporter,
2403
10.5k
          shouldAnalyzeCalledOnceConventions(Diags, D->getBeginLoc()));
2404
10.5k
    }
2405
10.5k
  }
2406
2407
260k
  bool FallThroughDiagFull =
2408
260k
      !Diags.isIgnored(diag::warn_unannotated_fallthrough, D->getBeginLoc());
2409
260k
  bool FallThroughDiagPerFunction = !Diags.isIgnored(
2410
260k
      diag::warn_unannotated_fallthrough_per_function, D->getBeginLoc());
2411
260k
  if (FallThroughDiagFull || 
FallThroughDiagPerFunction259k
||
2412
260k
      
fscope->HasFallthroughStmt259k
) {
2413
122
    DiagnoseSwitchLabelsFallthrough(S, AC, !FallThroughDiagFull);
2414
122
  }
2415
2416
260k
  if (S.getLangOpts().ObjCWeak &&
2417
260k
      
!Diags.isIgnored(diag::warn_arc_repeated_use_of_weak, D->getBeginLoc())1.52k
)
2418
112
    diagnoseRepeatedUseOfWeak(S, fscope, D, AC.getParentMap());
2419
2420
2421
  // Check for infinite self-recursion in functions
2422
260k
  if (!Diags.isIgnored(diag::warn_infinite_recursive_function,
2423
260k
                       D->getBeginLoc())) {
2424
40.1k
    if (const FunctionDecl *FD = dyn_cast<FunctionDecl>(D)) {
2425
40.1k
      checkRecursiveFunction(S, FD, Body, AC);
2426
40.1k
    }
2427
40.1k
  }
2428
2429
  // Check for throw out of non-throwing function.
2430
260k
  if (!Diags.isIgnored(diag::warn_throw_in_noexcept_func, D->getBeginLoc()))
2431
259k
    if (const FunctionDecl *FD = dyn_cast<FunctionDecl>(D))
2432
252k
      if (S.getLangOpts().CPlusPlus && 
isNoexcept(FD)149k
)
2433
7.83k
        checkThrowInNonThrowingFunc(S, FD, AC);
2434
2435
  // If none of the previous checks caused a CFG build, trigger one here
2436
  // for the logical error handler.
2437
260k
  if (LogicalErrorHandler::hasActiveDiagnostics(Diags, D->getBeginLoc())) {
2438
40.1k
    AC.getCFG();
2439
40.1k
  }
2440
2441
  // Collect statistics about the CFG if it was built.
2442
260k
  if (S.CollectStats && 
AC.isCFGBuilt()4
) {
2443
3
    ++NumFunctionsAnalyzed;
2444
3
    if (CFG *cfg = AC.getCFG()) {
2445
      // If we successfully built a CFG for this context, record some more
2446
      // detail information about it.
2447
3
      NumCFGBlocks += cfg->getNumBlockIDs();
2448
3
      MaxCFGBlocksPerFunction = std::max(MaxCFGBlocksPerFunction,
2449
3
                                         cfg->getNumBlockIDs());
2450
3
    } else {
2451
0
      ++NumFunctionsWithBadCFGs;
2452
0
    }
2453
3
  }
2454
260k
}
2455
2456
4
void clang::sema::AnalysisBasedWarnings::PrintStats() const {
2457
4
  llvm::errs() << "\n*** Analysis Based Warnings Stats:\n";
2458
2459
4
  unsigned NumCFGsBuilt = NumFunctionsAnalyzed - NumFunctionsWithBadCFGs;
2460
4
  unsigned AvgCFGBlocksPerFunction =
2461
4
      !NumCFGsBuilt ? 
01
:
NumCFGBlocks/NumCFGsBuilt3
;
2462
4
  llvm::errs() << NumFunctionsAnalyzed << " functions analyzed ("
2463
4
               << NumFunctionsWithBadCFGs << " w/o CFGs).\n"
2464
4
               << "  " << NumCFGBlocks << " CFG blocks built.\n"
2465
4
               << "  " << AvgCFGBlocksPerFunction
2466
4
               << " average CFG blocks per function.\n"
2467
4
               << "  " << MaxCFGBlocksPerFunction
2468
4
               << " max CFG blocks per function.\n";
2469
2470
4
  unsigned AvgUninitVariablesPerFunction = !NumUninitAnalysisFunctions ? 0
2471
4
      : 
NumUninitAnalysisVariables/NumUninitAnalysisFunctions0
;
2472
4
  unsigned AvgUninitBlockVisitsPerFunction = !NumUninitAnalysisFunctions ? 0
2473
4
      : 
NumUninitAnalysisBlockVisits/NumUninitAnalysisFunctions0
;
2474
4
  llvm::errs() << NumUninitAnalysisFunctions
2475
4
               << " functions analyzed for uninitialiazed variables\n"
2476
4
               << "  " << NumUninitAnalysisVariables << " variables analyzed.\n"
2477
4
               << "  " << AvgUninitVariablesPerFunction
2478
4
               << " average variables per function.\n"
2479
4
               << "  " << MaxUninitAnalysisVariablesPerFunction
2480
4
               << " max variables per function.\n"
2481
4
               << "  " << NumUninitAnalysisBlockVisits << " block visits.\n"
2482
4
               << "  " << AvgUninitBlockVisitsPerFunction
2483
4
               << " average block visits per function.\n"
2484
4
               << "  " << MaxUninitAnalysisBlockVisitsPerFunction
2485
4
               << " max block visits per function.\n";
2486
4
}