Coverage Report

Created: 2021-06-15 06:44

/Users/buildslave/jenkins/workspace/coverage/llvm-project/clang/lib/Sema/AnalysisBasedWarnings.cpp

//=- AnalysisBasedWarnings.cpp - Sema warnings based on libAnalysis -*- C++ -*-=//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This file defines analysis_warnings::[Policy,Executor].
// Together they are used by Sema to issue warnings based on inexpensive
// static analysis algorithms in libAnalysis.
//
//===----------------------------------------------------------------------===//

#include "clang/Sema/AnalysisBasedWarnings.h"
#include "clang/AST/DeclCXX.h"
#include "clang/AST/DeclObjC.h"
#include "clang/AST/EvaluatedExprVisitor.h"
#include "clang/AST/ExprCXX.h"
#include "clang/AST/ExprObjC.h"
#include "clang/AST/ParentMap.h"
#include "clang/AST/RecursiveASTVisitor.h"
#include "clang/AST/StmtCXX.h"
#include "clang/AST/StmtObjC.h"
#include "clang/AST/StmtVisitor.h"
#include "clang/Analysis/Analyses/CFGReachabilityAnalysis.h"
#include "clang/Analysis/Analyses/CalledOnceCheck.h"
#include "clang/Analysis/Analyses/Consumed.h"
#include "clang/Analysis/Analyses/ReachableCode.h"
#include "clang/Analysis/Analyses/ThreadSafety.h"
#include "clang/Analysis/Analyses/UninitializedValues.h"
#include "clang/Analysis/AnalysisDeclContext.h"
#include "clang/Analysis/CFG.h"
#include "clang/Analysis/CFGStmtMap.h"
#include "clang/Basic/SourceLocation.h"
#include "clang/Basic/SourceManager.h"
#include "clang/Lex/Preprocessor.h"
#include "clang/Sema/ScopeInfo.h"
#include "clang/Sema/SemaInternal.h"
#include "llvm/ADT/ArrayRef.h"
#include "llvm/ADT/BitVector.h"
#include "llvm/ADT/MapVector.h"
#include "llvm/ADT/SmallString.h"
#include "llvm/ADT/SmallVector.h"
#include "llvm/ADT/StringRef.h"
#include "llvm/Support/Casting.h"
#include <algorithm>
#include <deque>
#include <iterator>
using namespace clang;

//===----------------------------------------------------------------------===//
// Unreachable code analysis.
//===----------------------------------------------------------------------===//

namespace {
  class UnreachableCodeHandler : public reachable_code::Callback {
    Sema &S;
    SourceRange PreviousSilenceableCondVal;

  public:
    UnreachableCodeHandler(Sema &s) : S(s) {}

    void HandleUnreachable(reachable_code::UnreachableKind UK,
                           SourceLocation L,
                           SourceRange SilenceableCondVal,
                           SourceRange R1,
                           SourceRange R2) override {
      // Avoid reporting multiple unreachable code diagnostics that are
      // triggered by the same conditional value.
      if (PreviousSilenceableCondVal.isValid() &&
          SilenceableCondVal.isValid() &&
          PreviousSilenceableCondVal == SilenceableCondVal)
        return;
      PreviousSilenceableCondVal = SilenceableCondVal;

      unsigned diag = diag::warn_unreachable;
      switch (UK) {
        case reachable_code::UK_Break:
          diag = diag::warn_unreachable_break;
          break;
        case reachable_code::UK_Return:
          diag = diag::warn_unreachable_return;
          break;
        case reachable_code::UK_Loop_Increment:
          diag = diag::warn_unreachable_loop_increment;
          break;
        case reachable_code::UK_Other:
          break;
      }

      S.Diag(L, diag) << R1 << R2;

      SourceLocation Open = SilenceableCondVal.getBegin();
      if (Open.isValid()) {
        SourceLocation Close = SilenceableCondVal.getEnd();
        Close = S.getLocForEndOfToken(Close);
        if (Close.isValid()) {
          S.Diag(Open, diag::note_unreachable_silence)
            << FixItHint::CreateInsertion(Open, "/* DISABLES CODE */ (")
            << FixItHint::CreateInsertion(Close, ")");
        }
      }
    }
  };
} // anonymous namespace

/// CheckUnreachable - Check for unreachable code.
static void CheckUnreachable(Sema &S, AnalysisDeclContext &AC) {
  // As a heuristic prune all diagnostics not in the main file.  Currently
  // the majority of warnings in headers are false positives.  These
  // are largely caused by configuration state, e.g. preprocessor
  // defined code, etc.
  //
  // Note that this is also a performance optimization.  Analyzing
  // headers many times can be expensive.
  if (!S.getSourceManager().isInMainFile(AC.getDecl()->getBeginLoc()))
    return;

  UnreachableCodeHandler UC(S);
  reachable_code::FindUnreachableCode(AC, S.getPreprocessor(), UC);
}
namespace {
/// Warn on logical operator errors in CFGBuilder
class LogicalErrorHandler : public CFGCallback {
  Sema &S;

public:
  LogicalErrorHandler(Sema &S) : CFGCallback(), S(S) {}

  static bool HasMacroID(const Expr *E) {
    if (E->getExprLoc().isMacroID())
      return true;

    // Recurse to children.
    for (const Stmt *SubStmt : E->children())
      if (const Expr *SubExpr = dyn_cast_or_null<Expr>(SubStmt))
        if (HasMacroID(SubExpr))
          return true;

    return false;
  }

  void compareAlwaysTrue(const BinaryOperator *B, bool isAlwaysTrue) override {
    if (HasMacroID(B))
      return;

    SourceRange DiagRange = B->getSourceRange();
    S.Diag(B->getExprLoc(), diag::warn_tautological_overlap_comparison)
        << DiagRange << isAlwaysTrue;
  }

  void compareBitwiseEquality(const BinaryOperator *B,
                              bool isAlwaysTrue) override {
    if (HasMacroID(B))
      return;

    SourceRange DiagRange = B->getSourceRange();
    S.Diag(B->getExprLoc(), diag::warn_comparison_bitwise_always)
        << DiagRange << isAlwaysTrue;
  }

  void compareBitwiseOr(const BinaryOperator *B) override {
    if (HasMacroID(B))
      return;

    SourceRange DiagRange = B->getSourceRange();
    S.Diag(B->getExprLoc(), diag::warn_comparison_bitwise_or) << DiagRange;
  }

  static bool hasActiveDiagnostics(DiagnosticsEngine &Diags,
                                   SourceLocation Loc) {
    return !Diags.isIgnored(diag::warn_tautological_overlap_comparison, Loc) ||
           !Diags.isIgnored(diag::warn_comparison_bitwise_or, Loc);
  }
};
} // anonymous namespace

//===----------------------------------------------------------------------===//
// Check for infinite self-recursion in functions
//===----------------------------------------------------------------------===//

// Returns true if the function is called anywhere within the CFGBlock.
// For member functions, the additional condition of being called from the
// 'this' pointer is required.
static bool hasRecursiveCallInPath(const FunctionDecl *FD, CFGBlock &Block) {
  // Process all the Stmt's in this block to find any calls to FD.
  for (const auto &B : Block) {
    if (B.getKind() != CFGElement::Statement)
      continue;

    const CallExpr *CE = dyn_cast<CallExpr>(B.getAs<CFGStmt>()->getStmt());
    if (!CE || !CE->getCalleeDecl() ||
        CE->getCalleeDecl()->getCanonicalDecl() != FD)
      continue;

    // Skip function calls which are qualified with a templated class.
    if (const DeclRefExpr *DRE =
            dyn_cast<DeclRefExpr>(CE->getCallee()->IgnoreParenImpCasts())) {
      if (NestedNameSpecifier *NNS = DRE->getQualifier()) {
        if (NNS->getKind() == NestedNameSpecifier::TypeSpec &&
            isa<TemplateSpecializationType>(NNS->getAsType())) {
          continue;
        }
      }
    }

    const CXXMemberCallExpr *MCE = dyn_cast<CXXMemberCallExpr>(CE);
    if (!MCE || isa<CXXThisExpr>(MCE->getImplicitObjectArgument()) ||
        !MCE->getMethodDecl()->isVirtual())
      return true;
  }
  return false;
}
// Returns true if every path from the entry block passes through a call to FD.
static bool checkForRecursiveFunctionCall(const FunctionDecl *FD, CFG *cfg) {
  llvm::SmallPtrSet<CFGBlock *, 16> Visited;
  llvm::SmallVector<CFGBlock *, 16> WorkList;
  // Keep track of whether we found at least one recursive path.
  bool foundRecursion = false;

  const unsigned ExitID = cfg->getExit().getBlockID();

  // Seed the work list with the entry block.
  WorkList.push_back(&cfg->getEntry());

  while (!WorkList.empty()) {
    CFGBlock *Block = WorkList.pop_back_val();

    for (auto I = Block->succ_begin(), E = Block->succ_end(); I != E; ++I) {
      if (CFGBlock *SuccBlock = *I) {
        if (!Visited.insert(SuccBlock).second)
          continue;

        // Found a path to the exit node without a recursive call.
        if (ExitID == SuccBlock->getBlockID())
          return false;

        // If the successor block contains a recursive call, end analysis there.
        if (hasRecursiveCallInPath(FD, *SuccBlock)) {
          foundRecursion = true;
          continue;
        }

        WorkList.push_back(SuccBlock);
      }
    }
  }
  return foundRecursion;
}
static void checkRecursiveFunction(Sema &S, const FunctionDecl *FD,
                                   const Stmt *Body, AnalysisDeclContext &AC) {
  FD = FD->getCanonicalDecl();

  // Only run on non-templated functions and non-templated members of
  // templated classes.
  if (FD->getTemplatedKind() != FunctionDecl::TK_NonTemplate &&
      FD->getTemplatedKind() != FunctionDecl::TK_MemberSpecialization)
    return;

  CFG *cfg = AC.getCFG();
  if (!cfg) return;

  // If the exit block is unreachable, skip processing the function.
  if (cfg->getExit().pred_empty())
    return;

  // Emit diagnostic if a recursive function call is detected for all paths.
  if (checkForRecursiveFunctionCall(FD, cfg))
    S.Diag(Body->getBeginLoc(), diag::warn_infinite_recursive_function);
}
//===----------------------------------------------------------------------===//
// Check for throw in a non-throwing function.
//===----------------------------------------------------------------------===//

/// Determine whether an exception thrown by E, unwinding from ThrowBlock,
/// can reach ExitBlock.
static bool throwEscapes(Sema &S, const CXXThrowExpr *E, CFGBlock &ThrowBlock,
                         CFG *Body) {
  SmallVector<CFGBlock *, 16> Stack;
  llvm::BitVector Queued(Body->getNumBlockIDs());

  Stack.push_back(&ThrowBlock);
  Queued[ThrowBlock.getBlockID()] = true;

  while (!Stack.empty()) {
    CFGBlock &UnwindBlock = *Stack.back();
    Stack.pop_back();

    for (auto &Succ : UnwindBlock.succs()) {
      if (!Succ.isReachable() || Queued[Succ->getBlockID()])
        continue;

      if (Succ->getBlockID() == Body->getExit().getBlockID())
        return true;

      if (auto *Catch =
              dyn_cast_or_null<CXXCatchStmt>(Succ->getLabel())) {
        QualType Caught = Catch->getCaughtType();
        if (Caught.isNull() || // catch (...) catches everything
            !E->getSubExpr() || // throw; is considered caught by any handler
            S.handlerCanCatch(Caught, E->getSubExpr()->getType()))
          // Exception doesn't escape via this path.
          break;
      } else {
        Stack.push_back(Succ);
        Queued[Succ->getBlockID()] = true;
      }
    }
  }

  return false;
}
static void visitReachableThrows(
    CFG *BodyCFG,
    llvm::function_ref<void(const CXXThrowExpr *, CFGBlock &)> Visit) {
  llvm::BitVector Reachable(BodyCFG->getNumBlockIDs());
  clang::reachable_code::ScanReachableFromBlock(&BodyCFG->getEntry(), Reachable);
  for (CFGBlock *B : *BodyCFG) {
    if (!Reachable[B->getBlockID()])
      continue;
    for (CFGElement &E : *B) {
      Optional<CFGStmt> S = E.getAs<CFGStmt>();
      if (!S)
        continue;
      if (auto *Throw = dyn_cast<CXXThrowExpr>(S->getStmt()))
        Visit(Throw, *B);
    }
  }
}
static void EmitDiagForCXXThrowInNonThrowingFunc(Sema &S, SourceLocation OpLoc,
                                                 const FunctionDecl *FD) {
  if (!S.getSourceManager().isInSystemHeader(OpLoc) &&
      FD->getTypeSourceInfo()) {
    S.Diag(OpLoc, diag::warn_throw_in_noexcept_func) << FD;
    if (S.getLangOpts().CPlusPlus11 &&
        (isa<CXXDestructorDecl>(FD) ||
         FD->getDeclName().getCXXOverloadedOperator() == OO_Delete ||
         FD->getDeclName().getCXXOverloadedOperator() == OO_Array_Delete)) {
      if (const auto *Ty = FD->getTypeSourceInfo()->getType()->
                                         getAs<FunctionProtoType>())
        S.Diag(FD->getLocation(), diag::note_throw_in_dtor)
            << !isa<CXXDestructorDecl>(FD) << !Ty->hasExceptionSpec()
            << FD->getExceptionSpecSourceRange();
    } else
      S.Diag(FD->getLocation(), diag::note_throw_in_function)
          << FD->getExceptionSpecSourceRange();
  }
}

static void checkThrowInNonThrowingFunc(Sema &S, const FunctionDecl *FD,
                                        AnalysisDeclContext &AC) {
  CFG *BodyCFG = AC.getCFG();
  if (!BodyCFG)
    return;
  if (BodyCFG->getExit().pred_empty())
    return;
  visitReachableThrows(BodyCFG, [&](const CXXThrowExpr *Throw, CFGBlock &Block) {
    if (throwEscapes(S, Throw, Block, BodyCFG))
      EmitDiagForCXXThrowInNonThrowingFunc(S, Throw->getThrowLoc(), FD);
  });
}

static bool isNoexcept(const FunctionDecl *FD) {
  const auto *FPT = FD->getType()->castAs<FunctionProtoType>();
  if (FPT->isNothrow() || FD->hasAttr<NoThrowAttr>())
    return true;
  return false;
}
//===----------------------------------------------------------------------===//
// Check for missing return value.
//===----------------------------------------------------------------------===//

enum ControlFlowKind {
  UnknownFallThrough,
  NeverFallThrough,
  MaybeFallThrough,
  AlwaysFallThrough,
  NeverFallThroughOrReturn
};

/// CheckFallThrough - Check that we don't fall off the end of a
/// Statement that should return a value.
///
/// \returns AlwaysFallThrough iff we always fall off the end of the statement,
/// MaybeFallThrough iff we might or might not fall off the end,
/// NeverFallThroughOrReturn iff we never fall off the end of the statement or
/// return.  We assume NeverFallThrough iff we never fall off the end of the
/// statement but we may return.  We assume that functions not marked noreturn
/// will return.
static ControlFlowKind CheckFallThrough(AnalysisDeclContext &AC) {
  CFG *cfg = AC.getCFG();
  if (!cfg) return UnknownFallThrough;

  // The CFG leaves in dead things, and we don't want the dead code paths to
  // confuse us, so we mark all live things first.
  llvm::BitVector live(cfg->getNumBlockIDs());
  unsigned count = reachable_code::ScanReachableFromBlock(&cfg->getEntry(),
                                                          live);

  bool AddEHEdges = AC.getAddEHEdges();
  if (!AddEHEdges && count != cfg->getNumBlockIDs())
    // When there are things remaining dead, and we didn't add EH edges
    // from CallExprs to the catch clauses, we have to go back and
    // mark them as live.
    for (const auto *B : *cfg) {
      if (!live[B->getBlockID()]) {
        if (B->pred_begin() == B->pred_end()) {
          const Stmt *Term = B->getTerminatorStmt();
          if (Term && isa<CXXTryStmt>(Term))
            // When not adding EH edges from calls, catch clauses
            // can otherwise seem dead.  Avoid noting them as dead.
            count += reachable_code::ScanReachableFromBlock(B, live);
          continue;
        }
      }
    }

  // Now we know what is live, we check the live predecessors of the exit block
  // and look for fall through paths, being careful to ignore normal returns,
  // and exceptional paths.
  bool HasLiveReturn = false;
  bool HasFakeEdge = false;
  bool HasPlainEdge = false;
  bool HasAbnormalEdge = false;

  // Ignore default cases that aren't likely to be reachable because all
  // enums in a switch(X) have explicit case statements.
  CFGBlock::FilterOptions FO;
  FO.IgnoreDefaultsWithCoveredEnums = 1;

  for (CFGBlock::filtered_pred_iterator I =
           cfg->getExit().filtered_pred_start_end(FO);
       I.hasMore(); ++I) {
    const CFGBlock &B = **I;
    if (!live[B.getBlockID()])
      continue;

    // Skip blocks which contain an element marked as no-return. They don't
    // represent actually viable edges into the exit block, so mark them as
    // abnormal.
    if (B.hasNoReturnElement()) {
      HasAbnormalEdge = true;
      continue;
    }

    // Destructors can appear after the 'return' in the CFG.  This is
    // normal.  We need to look past the destructors for the return
    // statement (if it exists).
    CFGBlock::const_reverse_iterator ri = B.rbegin(), re = B.rend();

    for ( ; ri != re ; ++ri)
      if (ri->getAs<CFGStmt>())
        break;

    // No more CFGElements in the block?
    if (ri == re) {
      const Stmt *Term = B.getTerminatorStmt();
      if (Term && isa<CXXTryStmt>(Term)) {
        HasAbnormalEdge = true;
        continue;
      }
      // A labeled empty statement, or the entry block...
      HasPlainEdge = true;
      continue;
    }

    CFGStmt CS = ri->castAs<CFGStmt>();
    const Stmt *S = CS.getStmt();
    if (isa<ReturnStmt>(S) || isa<CoreturnStmt>(S)) {
      HasLiveReturn = true;
      continue;
    }
    if (isa<ObjCAtThrowStmt>(S)) {
      HasFakeEdge = true;
      continue;
    }
    if (isa<CXXThrowExpr>(S)) {
      HasFakeEdge = true;
      continue;
    }
    if (isa<MSAsmStmt>(S)) {
      // TODO: Verify this is correct.
      HasFakeEdge = true;
      HasLiveReturn = true;
      continue;
    }
    if (isa<CXXTryStmt>(S)) {
      HasAbnormalEdge = true;
      continue;
    }
    if (std::find(B.succ_begin(), B.succ_end(), &cfg->getExit())
        == B.succ_end()) {
      HasAbnormalEdge = true;
      continue;
    }

    HasPlainEdge = true;
  }
  if (!HasPlainEdge) {
    if (HasLiveReturn)
      return NeverFallThrough;
    return NeverFallThroughOrReturn;
  }
  if (HasAbnormalEdge || HasFakeEdge || HasLiveReturn)
    return MaybeFallThrough;
  // This says AlwaysFallThrough for calls to functions that are not marked
  // noreturn, that don't return.  If people would like this warning to be more
  // accurate, such functions should be marked as noreturn.
  return AlwaysFallThrough;
}
namespace {

struct CheckFallThroughDiagnostics {
  unsigned diag_MaybeFallThrough_HasNoReturn;
  unsigned diag_MaybeFallThrough_ReturnsNonVoid;
  unsigned diag_AlwaysFallThrough_HasNoReturn;
  unsigned diag_AlwaysFallThrough_ReturnsNonVoid;
  unsigned diag_NeverFallThroughOrReturn;
  enum { Function, Block, Lambda, Coroutine } funMode;
  SourceLocation FuncLoc;

  static CheckFallThroughDiagnostics MakeForFunction(const Decl *Func) {
    CheckFallThroughDiagnostics D;
    D.FuncLoc = Func->getLocation();
    D.diag_MaybeFallThrough_HasNoReturn =
      diag::warn_falloff_noreturn_function;
    D.diag_MaybeFallThrough_ReturnsNonVoid =
      diag::warn_maybe_falloff_nonvoid_function;
    D.diag_AlwaysFallThrough_HasNoReturn =
      diag::warn_falloff_noreturn_function;
    D.diag_AlwaysFallThrough_ReturnsNonVoid =
      diag::warn_falloff_nonvoid_function;

    // Don't suggest that virtual functions be marked "noreturn", since they
    // might be overridden by non-noreturn functions.
    bool isVirtualMethod = false;
    if (const CXXMethodDecl *Method = dyn_cast<CXXMethodDecl>(Func))
      isVirtualMethod = Method->isVirtual();

    // Don't suggest that template instantiations be marked "noreturn"
    bool isTemplateInstantiation = false;
    if (const FunctionDecl *Function = dyn_cast<FunctionDecl>(Func))
      isTemplateInstantiation = Function->isTemplateInstantiation();

    if (!isVirtualMethod && !isTemplateInstantiation)
      D.diag_NeverFallThroughOrReturn =
        diag::warn_suggest_noreturn_function;
    else
      D.diag_NeverFallThroughOrReturn = 0;

    D.funMode = Function;
    return D;
  }

  static CheckFallThroughDiagnostics MakeForCoroutine(const Decl *Func) {
    CheckFallThroughDiagnostics D;
    D.FuncLoc = Func->getLocation();
    D.diag_MaybeFallThrough_HasNoReturn = 0;
    D.diag_MaybeFallThrough_ReturnsNonVoid =
        diag::warn_maybe_falloff_nonvoid_coroutine;
    D.diag_AlwaysFallThrough_HasNoReturn = 0;
    D.diag_AlwaysFallThrough_ReturnsNonVoid =
        diag::warn_falloff_nonvoid_coroutine;
    D.funMode = Coroutine;
    return D;
  }

  static CheckFallThroughDiagnostics MakeForBlock() {
    CheckFallThroughDiagnostics D;
    D.diag_MaybeFallThrough_HasNoReturn =
      diag::err_noreturn_block_has_return_expr;
    D.diag_MaybeFallThrough_ReturnsNonVoid =
      diag::err_maybe_falloff_nonvoid_block;
    D.diag_AlwaysFallThrough_HasNoReturn =
      diag::err_noreturn_block_has_return_expr;
    D.diag_AlwaysFallThrough_ReturnsNonVoid =
      diag::err_falloff_nonvoid_block;
    D.diag_NeverFallThroughOrReturn = 0;
    D.funMode = Block;
    return D;
  }

  static CheckFallThroughDiagnostics MakeForLambda() {
    CheckFallThroughDiagnostics D;
    D.diag_MaybeFallThrough_HasNoReturn =
      diag::err_noreturn_lambda_has_return_expr;
    D.diag_MaybeFallThrough_ReturnsNonVoid =
      diag::warn_maybe_falloff_nonvoid_lambda;
    D.diag_AlwaysFallThrough_HasNoReturn =
      diag::err_noreturn_lambda_has_return_expr;
    D.diag_AlwaysFallThrough_ReturnsNonVoid =
      diag::warn_falloff_nonvoid_lambda;
    D.diag_NeverFallThroughOrReturn = 0;
    D.funMode = Lambda;
    return D;
  }

  bool checkDiagnostics(DiagnosticsEngine &D, bool ReturnsVoid,
                        bool HasNoReturn) const {
    if (funMode == Function) {
      return (ReturnsVoid ||
              D.isIgnored(diag::warn_maybe_falloff_nonvoid_function,
                          FuncLoc)) &&
             (!HasNoReturn ||
              D.isIgnored(diag::warn_noreturn_function_has_return_expr,
                          FuncLoc)) &&
             (!ReturnsVoid ||
              D.isIgnored(diag::warn_suggest_noreturn_block, FuncLoc));
    }
    if (funMode == Coroutine) {
      return (ReturnsVoid ||
              D.isIgnored(diag::warn_maybe_falloff_nonvoid_function, FuncLoc) ||
              D.isIgnored(diag::warn_maybe_falloff_nonvoid_coroutine,
                          FuncLoc)) &&
             (!HasNoReturn);
    }
    // For blocks / lambdas.
    return ReturnsVoid && !HasNoReturn;
  }
};

} // anonymous namespace
/// CheckFallThroughForBody - Check that we don't fall off the end of a
/// function that should return a value.  Check that we don't fall off the end
/// of a noreturn function.  We assume that functions and blocks not marked
/// noreturn will return.
static void CheckFallThroughForBody(Sema &S, const Decl *D, const Stmt *Body,
                                    QualType BlockType,
                                    const CheckFallThroughDiagnostics &CD,
                                    AnalysisDeclContext &AC,
                                    sema::FunctionScopeInfo *FSI) {

  bool ReturnsVoid = false;
  bool HasNoReturn = false;
  bool IsCoroutine = FSI->isCoroutine();

  if (const auto *FD = dyn_cast<FunctionDecl>(D)) {
    if (const auto *CBody = dyn_cast<CoroutineBodyStmt>(Body))
      ReturnsVoid = CBody->getFallthroughHandler() != nullptr;
    else
      ReturnsVoid = FD->getReturnType()->isVoidType();
    HasNoReturn = FD->isNoReturn();
  }
  else if (const auto *MD = dyn_cast<ObjCMethodDecl>(D)) {
    ReturnsVoid = MD->getReturnType()->isVoidType();
    HasNoReturn = MD->hasAttr<NoReturnAttr>();
  }
  else if (isa<BlockDecl>(D)) {
    if (const FunctionType *FT =
          BlockType->getPointeeType()->getAs<FunctionType>()) {
      if (FT->getReturnType()->isVoidType())
        ReturnsVoid = true;
      if (FT->getNoReturnAttr())
        HasNoReturn = true;
    }
  }

  DiagnosticsEngine &Diags = S.getDiagnostics();

  // Short circuit for compilation speed.
  if (CD.checkDiagnostics(Diags, ReturnsVoid, HasNoReturn))
      return;
  SourceLocation LBrace = Body->getBeginLoc(), RBrace = Body->getEndLoc();
  auto EmitDiag = [&](SourceLocation Loc, unsigned DiagID) {
    if (IsCoroutine)
      S.Diag(Loc, DiagID) << FSI->CoroutinePromise->getType();
    else
      S.Diag(Loc, DiagID);
  };

  // cpu_dispatch functions permit empty function bodies for ICC compatibility.
  if (D->getAsFunction() && D->getAsFunction()->isCPUDispatchMultiVersion())
    return;

  // Either in a function body compound statement, or a function-try-block.
  switch (CheckFallThrough(AC)) {
    case UnknownFallThrough:
      break;

    case MaybeFallThrough:
      if (HasNoReturn)
        EmitDiag(RBrace, CD.diag_MaybeFallThrough_HasNoReturn);
      else if (!ReturnsVoid)
        EmitDiag(RBrace, CD.diag_MaybeFallThrough_ReturnsNonVoid);
      break;
    case AlwaysFallThrough:
      if (HasNoReturn)
        EmitDiag(RBrace, CD.diag_AlwaysFallThrough_HasNoReturn);
      else if (!ReturnsVoid)
        EmitDiag(RBrace, CD.diag_AlwaysFallThrough_ReturnsNonVoid);
      break;
    case NeverFallThroughOrReturn:
      if (ReturnsVoid && !HasNoReturn && CD.diag_NeverFallThroughOrReturn) {
        if (const FunctionDecl *FD = dyn_cast<FunctionDecl>(D)) {
          S.Diag(LBrace, CD.diag_NeverFallThroughOrReturn) << 0 << FD;
        } else if (const ObjCMethodDecl *MD = dyn_cast<ObjCMethodDecl>(D)) {
          S.Diag(LBrace, CD.diag_NeverFallThroughOrReturn) << 1 << MD;
        } else {
          S.Diag(LBrace, CD.diag_NeverFallThroughOrReturn);
        }
      }
      break;
    case NeverFallThrough:
      break;
  }
}
//===----------------------------------------------------------------------===//
// -Wuninitialized
//===----------------------------------------------------------------------===//

namespace {
/// ContainsReference - A visitor class to search for references to
/// a particular declaration (the needle) within any evaluated component of an
/// expression (recursively).
class ContainsReference : public ConstEvaluatedExprVisitor<ContainsReference> {
  bool FoundReference;
  const DeclRefExpr *Needle;

public:
  typedef ConstEvaluatedExprVisitor<ContainsReference> Inherited;

  ContainsReference(ASTContext &Context, const DeclRefExpr *Needle)
    : Inherited(Context), FoundReference(false), Needle(Needle) {}

  void VisitExpr(const Expr *E) {
    // Stop evaluating if we already have a reference.
    if (FoundReference)
      return;

    Inherited::VisitExpr(E);
  }

  void VisitDeclRefExpr(const DeclRefExpr *E) {
    if (E == Needle)
      FoundReference = true;
    else
      Inherited::VisitDeclRefExpr(E);
  }

  bool doesContainReference() const { return FoundReference; }
};
} // anonymous namespace
static bool SuggestInitializationFixit(Sema &S, const VarDecl *VD) {
  QualType VariableTy = VD->getType().getCanonicalType();
  if (VariableTy->isBlockPointerType() &&
      !VD->hasAttr<BlocksAttr>()) {
    S.Diag(VD->getLocation(), diag::note_block_var_fixit_add_initialization)
        << VD->getDeclName()
        << FixItHint::CreateInsertion(VD->getLocation(), "__block ");
    return true;
  }

  // Don't issue a fixit if there is already an initializer.
  if (VD->getInit())
    return false;

  // Don't suggest a fixit inside macros.
  if (VD->getEndLoc().isMacroID())
    return false;

  SourceLocation Loc = S.getLocForEndOfToken(VD->getEndLoc());

  // Suggest possible initialization (if any).
  std::string Init = S.getFixItZeroInitializerForType(VariableTy, Loc);
  if (Init.empty())
    return false;

  S.Diag(Loc, diag::note_var_fixit_add_initialization) << VD->getDeclName()
    << FixItHint::CreateInsertion(Loc, Init);
  return true;
}

/// Create a fixit to remove an if-like statement, on the assumption that its
/// condition is CondVal.
static void CreateIfFixit(Sema &S, const Stmt *If, const Stmt *Then,
                          const Stmt *Else, bool CondVal,
                          FixItHint &Fixit1, FixItHint &Fixit2) {
  if (CondVal) {
    // If condition is always true, remove all but the 'then'.
    Fixit1 = FixItHint::CreateRemoval(
        CharSourceRange::getCharRange(If->getBeginLoc(), Then->getBeginLoc()));
    if (Else) {
      SourceLocation ElseKwLoc = S.getLocForEndOfToken(Then->getEndLoc());
      Fixit2 =
          FixItHint::CreateRemoval(SourceRange(ElseKwLoc, Else->getEndLoc()));
    }
  } else {
    // If condition is always false, remove all but the 'else'.
    if (Else)
      Fixit1 = FixItHint::CreateRemoval(CharSourceRange::getCharRange(
          If->getBeginLoc(), Else->getBeginLoc()));
    else
      Fixit1 = FixItHint::CreateRemoval(If->getSourceRange());
  }
}
/// DiagUninitUse -- Helper function to produce a diagnostic for an
/// uninitialized use of a variable.
static void DiagUninitUse(Sema &S, const VarDecl *VD, const UninitUse &Use,
                          bool IsCapturedByBlock) {
  bool Diagnosed = false;

  switch (Use.getKind()) {
  case UninitUse::Always:
    S.Diag(Use.getUser()->getBeginLoc(), diag::warn_uninit_var)
        << VD->getDeclName() << IsCapturedByBlock
        << Use.getUser()->getSourceRange();
    return;

  case UninitUse::AfterDecl:
  case UninitUse::AfterCall:
    S.Diag(VD->getLocation(), diag::warn_sometimes_uninit_var)
      << VD->getDeclName() << IsCapturedByBlock
      << (Use.getKind() == UninitUse::AfterDecl ? 4 : 5)
      << const_cast<DeclContext*>(VD->getLexicalDeclContext())
      << VD->getSourceRange();
    S.Diag(Use.getUser()->getBeginLoc(), diag::note_uninit_var_use)
        << IsCapturedByBlock << Use.getUser()->getSourceRange();
    return;

  case UninitUse::Maybe:
  case UninitUse::Sometimes:
    // Carry on to report sometimes-uninitialized branches, if possible,
    // or a 'may be used uninitialized' diagnostic otherwise.
    break;
  }

  // Diagnose each branch which leads to a sometimes-uninitialized use.
  for (UninitUse::branch_iterator I = Use.branch_begin(), E = Use.branch_end();
       I != E; ++I) {
    assert(Use.getKind() == UninitUse::Sometimes);

    const Expr *User = Use.getUser();
    const Stmt *Term = I->Terminator;

    // Information used when building the diagnostic.
    unsigned DiagKind;
    StringRef Str;
    SourceRange Range;

    // FixIts to suppress the diagnostic by removing the dead condition.
    // For all binary terminators, branch 0 is taken if the condition is true,
    // and branch 1 is taken if the condition is false.
    int RemoveDiagKind = -1;
    const char *FixitStr =
        S.getLangOpts().CPlusPlus ? (I->Output ? "true" : "false")
                                  : (I->Output ? "1" : "0");
    FixItHint Fixit1, Fixit2;

    switch (Term ? Term->getStmtClass() : Stmt::DeclStmtClass) {
    default:
      // Don't know how to report this. Just fall back to 'may be used
      // uninitialized'. FIXME: Can this happen?
      continue;

    // "condition is true / condition is false".
    case Stmt::IfStmtClass: {
      const IfStmt *IS = cast<IfStmt>(Term);
      DiagKind = 0;
      Str = "if";
      Range = IS->getCond()->getSourceRange();
      RemoveDiagKind = 0;
      CreateIfFixit(S, IS, IS->getThen(), IS->getElse(),
                    I->Output, Fixit1, Fixit2);
      break;
    }
    case Stmt::ConditionalOperatorClass: {
      const ConditionalOperator *CO = cast<ConditionalOperator>(Term);
      DiagKind = 0;
      Str = "?:";
      Range = CO->getCond()->getSourceRange();
      RemoveDiagKind = 0;
      CreateIfFixit(S, CO, CO->getTrueExpr(), CO->getFalseExpr(),
                    I->Output, Fixit1, Fixit2);
      break;
    }
    case Stmt::BinaryOperatorClass: {
      const BinaryOperator *BO = cast<BinaryOperator>(Term);
      if (!BO->isLogicalOp())
        continue;
      DiagKind = 0;
      Str = BO->getOpcodeStr();
      Range = BO->getLHS()->getSourceRange();
      RemoveDiagKind = 0;
      if ((BO->getOpcode() == BO_LAnd && I->Output) ||
          (BO->getOpcode() == BO_LOr && !I->Output))
        // true && y -> y, false || y -> y.
        Fixit1 = FixItHint::CreateRemoval(
            SourceRange(BO->getBeginLoc(), BO->getOperatorLoc()));
      else
        // false && y -> false, true || y -> true.
        Fixit1 = FixItHint::CreateReplacement(BO->getSourceRange(), FixitStr);
      break;
    }

    // "loop is entered / loop is exited".
    case Stmt::WhileStmtClass:
      DiagKind = 1;
      Str = "while";
      Range = cast<WhileStmt>(Term)->getCond()->getSourceRange();
      RemoveDiagKind = 1;
      Fixit1 = FixItHint::CreateReplacement(Range, FixitStr);
      break;
    case Stmt::ForStmtClass:
      DiagKind = 1;
      Str = "for";
      Range = cast<ForStmt>(Term)->getCond()->getSourceRange();
      RemoveDiagKind = 1;
      if (I->Output)
        Fixit1 = FixItHint::CreateRemoval(Range);
      else
        Fixit1 = FixItHint::CreateReplacement(Range, FixitStr);
      break;
    case Stmt::CXXForRangeStmtClass:
      if (I->Output == 1) {
        // The use occurs if a range-based for loop's body never executes.
        // That may be impossible, and there's no syntactic fix for this,
        // so treat it as a 'may be uninitialized' case.
        continue;
      }
      DiagKind = 1;
      Str = "for";
      Range = cast<CXXForRangeStmt>(Term)->getRangeInit()->getSourceRange();
      break;

    // "condition is true / loop is exited".
    case Stmt::DoStmtClass:
      DiagKind = 2;
      Str = "do";
      Range = cast<DoStmt>(Term)->getCond()->getSourceRange();
      RemoveDiagKind = 1;
      Fixit1 = FixItHint::CreateReplacement(Range, FixitStr);
      break;

    // "switch case is taken".
    case Stmt::CaseStmtClass:
      DiagKind = 3;
      Str = "case";
      Range = cast<CaseStmt>(Term)->getLHS()->getSourceRange();
      break;
    case Stmt::DefaultStmtClass:
      DiagKind = 3;
      Str = "default";
      Range = cast<DefaultStmt>(Term)->getDefaultLoc();
      break;
    }

    S.Diag(Range.getBegin(), diag::warn_sometimes_uninit_var)
      << VD->getDeclName() << IsCapturedByBlock << DiagKind
      << Str << I->Output << Range;
    S.Diag(User->getBeginLoc(), diag::note_uninit_var_use)
        << IsCapturedByBlock << User->getSourceRange();
    if (RemoveDiagKind != -1)
      S.Diag(Fixit1.RemoveRange.getBegin(), diag::note_uninit_fixit_remove_cond)
        << RemoveDiagKind << Str << I->Output << Fixit1 << Fixit2;

    Diagnosed = true;
  }

  if (!Diagnosed)
    S.Diag(Use.getUser()->getBeginLoc(), diag::warn_maybe_uninit_var)
        << VD->getDeclName() << IsCapturedByBlock
        << Use.getUser()->getSourceRange();
}
/// Diagnose uninitialized const reference usages.
static bool DiagnoseUninitializedConstRefUse(Sema &S, const VarDecl *VD,
                                             const UninitUse &Use) {
  S.Diag(Use.getUser()->getBeginLoc(), diag::warn_uninit_const_reference)
      << VD->getDeclName() << Use.getUser()->getSourceRange();
  return true;
}

/// DiagnoseUninitializedUse -- Helper function for diagnosing uses of an
/// uninitialized variable. This manages the different forms of diagnostic
/// emitted for particular types of uses. Returns true if the use was diagnosed
/// as a warning. If a particular use is one we omit warnings for, returns
/// false.
static bool DiagnoseUninitializedUse(Sema &S, const VarDecl *VD,
                                     const UninitUse &Use,
                                     bool alwaysReportSelfInit = false) {
  if (const DeclRefExpr *DRE = dyn_cast<DeclRefExpr>(Use.getUser())) {
    // Inspect the initializer of the variable declaration which is
    // being referenced prior to its initialization. We emit
    // specialized diagnostics for self-initialization, and we
    // specifically avoid warning about self references which take the
    // form of:
    //
    //   int x = x;
    //
    // This is used to indicate to GCC that 'x' is intentionally left
    // uninitialized. Proven code paths which access 'x' in
    // an uninitialized state after this will still warn.
    if (const Expr *Initializer = VD->getInit()) {
      if (!alwaysReportSelfInit && DRE == Initializer->IgnoreParenImpCasts())
        return false;

      ContainsReference CR(S.Context, DRE);
      CR.Visit(Initializer);
      if (CR.doesContainReference()) {
        S.Diag(DRE->getBeginLoc(), diag::warn_uninit_self_reference_in_init)
            << VD->getDeclName() << VD->getLocation() << DRE->getSourceRange();
        return true;
      }
    }

    DiagUninitUse(S, VD, Use, false);
  } else {
    const BlockExpr *BE = cast<BlockExpr>(Use.getUser());
    if (VD->getType()->isBlockPointerType() && !VD->hasAttr<BlocksAttr>())
      S.Diag(BE->getBeginLoc(),
             diag::warn_uninit_byref_blockvar_captured_by_block)
          << VD->getDeclName()
          << VD->getType().getQualifiers().hasObjCLifetime();
    else
      DiagUninitUse(S, VD, Use, true);
  }

  // Report where the variable was declared when the use wasn't within
  // the initializer of that declaration & we didn't already suggest
  // an initialization fixit.
  if (!SuggestInitializationFixit(S, VD))
    S.Diag(VD->getBeginLoc(), diag::note_var_declared_here)
        << VD->getDeclName();

  return true;
}
namespace {
  class FallthroughMapper : public RecursiveASTVisitor<FallthroughMapper> {
  public:
    FallthroughMapper(Sema &S)
      : FoundSwitchStatements(false),
        S(S) {
    }

    bool foundSwitchStatements() const { return FoundSwitchStatements; }

    void markFallthroughVisited(const AttributedStmt *Stmt) {
      bool Found = FallthroughStmts.erase(Stmt);
      assert(Found);
      (void)Found;
    }

    typedef llvm::SmallPtrSet<const AttributedStmt*, 8> AttrStmts;

    const AttrStmts &getFallthroughStmts() const {
      return FallthroughStmts;
    }

    void fillReachableBlocks(CFG *Cfg) {
      assert(ReachableBlocks.empty() && "ReachableBlocks already filled");
      std::deque<const CFGBlock *> BlockQueue;

      ReachableBlocks.insert(&Cfg->getEntry());
      BlockQueue.push_back(&Cfg->getEntry());
      // Mark all case blocks reachable to avoid problems with switching on
      // constants, covered enums, etc.
      // These blocks can contain fall-through annotations, and we don't want to
      // issue a warn_fallthrough_attr_unreachable for them.
      for (const auto *B : *Cfg) {
        const Stmt *L = B->getLabel();
        if (L && isa<SwitchCase>(L) && ReachableBlocks.insert(B).second)
          BlockQueue.push_back(B);
      }

      while (!BlockQueue.empty()) {
        const CFGBlock *P = BlockQueue.front();
        BlockQueue.pop_front();
        for (CFGBlock::const_succ_iterator I = P->succ_begin(),
                                           E = P->succ_end();
             I != E; ++I) {
          if (*I && ReachableBlocks.insert(*I).second)
            BlockQueue.push_back(*I);
        }
      }
    }

    bool checkFallThroughIntoBlock(const CFGBlock &B, int &AnnotatedCnt,
                                   bool IsTemplateInstantiation) {
      assert(!ReachableBlocks.empty() && "ReachableBlocks empty");

      int UnannotatedCnt = 0;
      AnnotatedCnt = 0;

      std::deque<const CFGBlock*> BlockQueue(B.pred_begin(), B.pred_end());
      while (!BlockQueue.empty()) {
        const CFGBlock *P = BlockQueue.front();
        BlockQueue.pop_front();
        if (!P) continue;

        const Stmt *Term = P->getTerminatorStmt();
        if (Term && isa<SwitchStmt>(Term))
          continue; // Switch statement, good.

        const SwitchCase *SW = dyn_cast_or_null<SwitchCase>(P->getLabel());
        if (SW && SW->getSubStmt() == B.getLabel() && P->begin() == P->end())
          continue; // Previous case label has no statements, good.

        const LabelStmt *L = dyn_cast_or_null<LabelStmt>(P->getLabel());
        if (L && L->getSubStmt() == B.getLabel() && P->begin() == P->end())
          continue; // Case label is preceded with a normal label, good.

        if (!ReachableBlocks.count(P)) {
          for (CFGBlock::const_reverse_iterator ElemIt = P->rbegin(),
                                                ElemEnd = P->rend();
               ElemIt != ElemEnd; ++ElemIt) {
            if (Optional<CFGStmt> CS = ElemIt->getAs<CFGStmt>()) {
              if (const AttributedStmt *AS = asFallThroughAttr(CS->getStmt())) {
                // Don't issue a warning for an unreachable fallthrough
                // attribute in template instantiations as it may not be
                // unreachable in all instantiations of the template.
                if (!IsTemplateInstantiation)
                  S.Diag(AS->getBeginLoc(),
                         diag::warn_fallthrough_attr_unreachable);
                markFallthroughVisited(AS);
                ++AnnotatedCnt;
                break;
              }
              // Don't care about other unreachable statements.
            }
          }
          // If there are no unreachable statements, this may be a special
          // case in CFG:
          // case X: {
          //    A a;  // A has a destructor.
          //    break;
          // }
          // // <<<< This place is represented by a 'hanging' CFG block.
          // case Y:
          continue;
        }

        const Stmt *LastStmt = getLastStmt(*P);
        if (const AttributedStmt *AS = asFallThroughAttr(LastStmt)) {
          markFallthroughVisited(AS);
          ++AnnotatedCnt;
          continue; // Fallthrough annotation, good.
        }

        if (!LastStmt) { // This block contains no executable statements.
          // Traverse its predecessors.
          std::copy(P->pred_begin(), P->pred_end(),
                    std::back_inserter(BlockQueue));
          continue;
        }

        ++UnannotatedCnt;
      }
      return !!UnannotatedCnt;
    }

    // RecursiveASTVisitor setup.
    bool shouldWalkTypesOfTypeLocs() const { return false; }

    bool VisitAttributedStmt(AttributedStmt *S) {
      if (asFallThroughAttr(S))
        FallthroughStmts.insert(S);
      return true;
    }

    bool VisitSwitchStmt(SwitchStmt *S) {
      FoundSwitchStatements = true;
      return true;
    }

    // We don't want to traverse local type declarations. We analyze their
    // methods separately.
    bool TraverseDecl(Decl *D) { return true; }

    // We analyze lambda bodies separately. Skip them here.
    bool TraverseLambdaExpr(LambdaExpr *LE) {
      // Traverse the captures, but not the body.
      for (const auto C : zip(LE->captures(), LE->capture_inits()))
        TraverseLambdaCapture(LE, &std::get<0>(C), std::get<1>(C));
      return true;
    }

  private:

    static const AttributedStmt *asFallThroughAttr(const Stmt *S) {
      if (const AttributedStmt *AS = dyn_cast_or_null<AttributedStmt>(S)) {
        if (hasSpecificAttr<FallThroughAttr>(AS->getAttrs()))
          return AS;
      }
      return nullptr;
    }

    static const Stmt *getLastStmt(const CFGBlock &B) {
      if (const Stmt *Term = B.getTerminatorStmt())
        return Term;
      for (CFGBlock::const_reverse_iterator ElemIt = B.rbegin(),
                                            ElemEnd = B.rend();
                                            ElemIt != ElemEnd; ++ElemIt) {
        if (Optional<CFGStmt> CS = ElemIt->getAs<CFGStmt>())
          return CS->getStmt();
      }
      // Workaround to detect a statement thrown out by CFGBuilder:
      //   case X: {} case Y:
      //   case X: ; case Y:
      if (const SwitchCase *SW = dyn_cast_or_null<SwitchCase>(B.getLabel()))
        if (!isa<SwitchCase>(SW->getSubStmt()))
          return SW->getSubStmt();

      return nullptr;
    }

    bool FoundSwitchStatements;
    AttrStmts FallthroughStmts;
    Sema &S;
    llvm::SmallPtrSet<const CFGBlock *, 16> ReachableBlocks;
  };
} // anonymous namespace
static StringRef getFallthroughAttrSpelling(Preprocessor &PP,
                                            SourceLocation Loc) {
  TokenValue FallthroughTokens[] = {
    tok::l_square, tok::l_square,
    PP.getIdentifierInfo("fallthrough"),
    tok::r_square, tok::r_square
  };

  TokenValue ClangFallthroughTokens[] = {
    tok::l_square, tok::l_square, PP.getIdentifierInfo("clang"),
    tok::coloncolon, PP.getIdentifierInfo("fallthrough"),
    tok::r_square, tok::r_square
  };

  bool PreferClangAttr = !PP.getLangOpts().CPlusPlus17 && !PP.getLangOpts().C2x;

  StringRef MacroName;
  if (PreferClangAttr)
    MacroName = PP.getLastMacroWithSpelling(Loc, ClangFallthroughTokens);
  if (MacroName.empty())
    MacroName = PP.getLastMacroWithSpelling(Loc, FallthroughTokens);
  if (MacroName.empty() && !PreferClangAttr)
    MacroName = PP.getLastMacroWithSpelling(Loc, ClangFallthroughTokens);
  if (MacroName.empty()) {
    if (!PreferClangAttr)
      MacroName = "[[fallthrough]]";
    else if (PP.getLangOpts().CPlusPlus)
      MacroName = "[[clang::fallthrough]]";
    else
      MacroName = "__attribute__((fallthrough))";
  }
  return MacroName;
}
static void DiagnoseSwitchLabelsFallthrough(Sema &S, AnalysisDeclContext &AC,
                                            bool PerFunction) {
  FallthroughMapper FM(S);
  FM.TraverseStmt(AC.getBody());

  if (!FM.foundSwitchStatements())
    return;

  if (PerFunction && FM.getFallthroughStmts().empty())
    return;

  CFG *Cfg = AC.getCFG();

  if (!Cfg)
    return;

  FM.fillReachableBlocks(Cfg);

  for (const CFGBlock *B : llvm::reverse(*Cfg)) {
    const Stmt *Label = B->getLabel();

    if (!Label || !isa<SwitchCase>(Label))
      continue;

    int AnnotatedCnt;

    bool IsTemplateInstantiation = false;
    if (const FunctionDecl *Function = dyn_cast<FunctionDecl>(AC.getDecl()))
      IsTemplateInstantiation = Function->isTemplateInstantiation();
    if (!FM.checkFallThroughIntoBlock(*B, AnnotatedCnt,
                                      IsTemplateInstantiation))
      continue;

    S.Diag(Label->getBeginLoc(),
           PerFunction ? diag::warn_unannotated_fallthrough_per_function
                       : diag::warn_unannotated_fallthrough);

    if (!AnnotatedCnt) {
      SourceLocation L = Label->getBeginLoc();
      if (L.isMacroID())
        continue;

      const Stmt *Term = B->getTerminatorStmt();
      // Skip empty cases.
      while (B->empty() && !Term && B->succ_size() == 1) {
        B = *B->succ_begin();
        Term = B->getTerminatorStmt();
      }
      if (!(B->empty() && Term && isa<BreakStmt>(Term))) {
        Preprocessor &PP = S.getPreprocessor();
        StringRef AnnotationSpelling = getFallthroughAttrSpelling(PP, L);
        SmallString<64> TextToInsert(AnnotationSpelling);
        TextToInsert += "; ";
        S.Diag(L, diag::note_insert_fallthrough_fixit)
            << AnnotationSpelling
            << FixItHint::CreateInsertion(L, TextToInsert);
      }
      S.Diag(L, diag::note_insert_break_fixit)
          << FixItHint::CreateInsertion(L, "break; ");
    }
  }

  for (const auto *F : FM.getFallthroughStmts())
    S.Diag(F->getBeginLoc(), diag::err_fallthrough_attr_invalid_placement);
}
static bool isInLoop(const ASTContext &Ctx, const ParentMap &PM,
                     const Stmt *S) {
  assert(S);

  do {
    switch (S->getStmtClass()) {
    case Stmt::ForStmtClass:
    case Stmt::WhileStmtClass:
    case Stmt::CXXForRangeStmtClass:
    case Stmt::ObjCForCollectionStmtClass:
      return true;
    case Stmt::DoStmtClass: {
      Expr::EvalResult Result;
      if (!cast<DoStmt>(S)->getCond()->EvaluateAsInt(Result, Ctx))
        return true;
      return Result.Val.getInt().getBoolValue();
    }
    default:
      break;
    }
  } while ((S = PM.getParent(S)));

  return false;
}
static void diagnoseRepeatedUseOfWeak(Sema &S,
1354
                                      const sema::FunctionScopeInfo *CurFn,
1355
                                      const Decl *D,
1356
112
                                      const ParentMap &PM) {
1357
112
  typedef sema::FunctionScopeInfo::WeakObjectProfileTy WeakObjectProfileTy;
1358
112
  typedef sema::FunctionScopeInfo::WeakObjectUseMap WeakObjectUseMap;
1359
112
  typedef sema::FunctionScopeInfo::WeakUseVector WeakUseVector;
1360
112
  typedef std::pair<const Stmt *, WeakObjectUseMap::const_iterator>
1361
112
  StmtUsesPair;
1362
1363
112
  ASTContext &Ctx = S.getASTContext();
1364
1365
112
  const WeakObjectUseMap &WeakMap = CurFn->getWeakObjectUses();
1366
1367
  // Extract all weak objects that are referenced more than once.
1368
112
  SmallVector<StmtUsesPair, 8> UsesByStmt;
1369
112
  for (WeakObjectUseMap::const_iterator I = WeakMap.begin(), E = WeakMap.end();
1370
267
       I != E; ++I) {
1371
155
    const WeakUseVector &Uses = I->second;
1372
1373
    // Find the first read of the weak object.
1374
155
    WeakUseVector::const_iterator UI = Uses.begin(), UE = Uses.end();
1375
217
    for ( ; UI != UE; ++UI) {
1376
197
      if (UI->isUnsafe())
1377
135
        break;
1378
197
    }
1379
1380
    // If there were only writes to this object, don't warn.
1381
155
    if (UI == UE)
1382
20
      continue;
1383
1384
    // If there was only one read, followed by any number of writes, and the
1385
    // read is not within a loop, don't warn. Additionally, don't warn in a
1386
    // loop if the base object is a local variable -- local variables are often
1387
    // changed in loops.
1388
135
    if (UI == Uses.begin()) {
1389
117
      WeakUseVector::const_iterator UI2 = UI;
1390
143
      for (++UI2; UI2 != UE; ++UI2)
1391
74
        if (UI2->isUnsafe())
1392
48
          break;
1393
1394
117
      if (UI2 == UE) {
1395
69
        if (!isInLoop(Ctx, PM, UI->getUseExpr()))
1396
49
          continue;
1397
1398
20
        const WeakObjectProfileTy &Profile = I->first;
1399
20
        if (!Profile.isExactProfile())
1400
2
          continue;
1401
1402
18
        const NamedDecl *Base = Profile.getBase();
1403
18
        if (!Base)
1404
2
          Base = Profile.getProperty();
1405
18
        assert(Base && "A profile always has a base or property.");
1406
1407
18
        if (const VarDecl *BaseVar = dyn_cast<VarDecl>(Base))
1408
18
          if (BaseVar->hasLocalStorage() && !isa<ParmVarDecl>(Base))
1409
2
            continue;
1410
18
      }
1411
117
    }
1412
1413
82
    UsesByStmt.push_back(StmtUsesPair(UI->getUseExpr(), I));
1414
82
  }
1415
1416
112
  if (UsesByStmt.empty())
1417
46
    return;
1418
1419
  // Sort by first use so that we emit the warnings in a deterministic order.
1420
66
  SourceManager &SM = S.getSourceManager();
1421
66
  llvm::sort(UsesByStmt,
1422
66
             [&SM](const StmtUsesPair &LHS, const StmtUsesPair &RHS) {
1423
26
               return SM.isBeforeInTranslationUnit(LHS.first->getBeginLoc(),
1424
26
                                                   RHS.first->getBeginLoc());
1425
26
             });
1426
1427
  // Classify the current code body for better warning text.
1428
  // This enum should stay in sync with the cases in
1429
  // warn_arc_repeated_use_of_weak and warn_arc_possible_repeated_use_of_weak.
1430
  // FIXME: Should we use a common classification enum and the same set of
1431
  // possibilities all throughout Sema?
1432
66
  enum {
1433
66
    Function,
1434
66
    Method,
1435
66
    Block,
1436
66
    Lambda
1437
66
  } FunctionKind;
1438
1439
66
  if (isa<sema::BlockScopeInfo>(CurFn))
1440
2
    FunctionKind = Block;
1441
64
  else if (isa<sema::LambdaScopeInfo>(CurFn))
1442
0
    FunctionKind = Lambda;
1443
64
  else if (isa<ObjCMethodDecl>(D))
1444
6
    FunctionKind = Method;
1445
58
  else
1446
58
    FunctionKind = Function;
1447
1448
  // Iterate through the sorted problems and emit warnings for each.
1449
82
  for (const auto &P : UsesByStmt) {
1450
82
    const Stmt *FirstRead = P.first;
1451
82
    const WeakObjectProfileTy &Key = P.second->first;
1452
82
    const WeakUseVector &Uses = P.second->second;
1453
1454
    // For complicated expressions like 'a.b.c' and 'x.b.c', WeakObjectProfileTy
1455
    // may not contain enough information to determine that these are different
1456
    // properties. We can only be 100% sure of a repeated use in certain cases,
1457
    // and we adjust the diagnostic kind accordingly so that the less certain
1458
    // case can be turned off if it is too noisy.
1459
82
    unsigned DiagKind;
1460
82
    if (Key.isExactProfile())
1461
68
      DiagKind = diag::warn_arc_repeated_use_of_weak;
1462
14
    else
1463
14
      DiagKind = diag::warn_arc_possible_repeated_use_of_weak;
1464
1465
    // Classify the weak object being accessed for better warning text.
1466
    // This enum should stay in sync with the cases in
1467
    // warn_arc_repeated_use_of_weak and warn_arc_possible_repeated_use_of_weak.
1468
82
    enum {
1469
82
      Variable,
1470
82
      Property,
1471
82
      ImplicitProperty,
1472
82
      Ivar
1473
82
    } ObjectKind;
1474
1475
82
    const NamedDecl *KeyProp = Key.getProperty();
1476
82
    if (isa<VarDecl>(KeyProp))
1477
6
      ObjectKind = Variable;
1478
76
    else if (isa<ObjCPropertyDecl>(KeyProp))
1479
64
      ObjectKind = Property;
1480
12
    else if (isa<ObjCMethodDecl>(KeyProp))
1481
4
      ObjectKind = ImplicitProperty;
1482
8
    else if (isa<ObjCIvarDecl>(KeyProp))
1483
8
      ObjectKind = Ivar;
1484
0
    else
1485
0
      llvm_unreachable("Unexpected weak object kind!");
1486
1487
    // Do not warn about IBOutlet weak property receivers being set to null
1488
    // since they are typically only used from the main thread.
1489
82
    if (const ObjCPropertyDecl *Prop = dyn_cast<ObjCPropertyDecl>(KeyProp))
1490
64
      if (Prop->hasAttr<IBOutletAttr>())
1491
4
        continue;
1492
1493
    // Show the first time the object was read.
1494
78
    S.Diag(FirstRead->getBeginLoc(), DiagKind)
1495
78
        << int(ObjectKind) << KeyProp << int(FunctionKind)
1496
78
        << FirstRead->getSourceRange();
1497
1498
    // Print all the other accesses as notes.
1499
164
    for (const auto &Use : Uses) {
1500
164
      if (Use.getUseExpr() == FirstRead)
1501
78
        continue;
1502
86
      S.Diag(Use.getUseExpr()->getBeginLoc(),
1503
86
             diag::note_arc_weak_also_accessed_here)
1504
86
          << Use.getUseExpr()->getSourceRange();
1505
86
    }
1506
78
  }
1507
66
}
1508
1509
namespace clang {
1510
namespace {
1511
typedef SmallVector<PartialDiagnosticAt, 1> OptionalNotes;
1512
typedef std::pair<PartialDiagnosticAt, OptionalNotes> DelayedDiag;
1513
typedef std::list<DelayedDiag> DiagList;
1514
1515
struct SortDiagBySourceLocation {
1516
  SourceManager &SM;
1517
2.30k
  SortDiagBySourceLocation(SourceManager &SM) : SM(SM) {}
1518
1519
2.52k
  bool operator()(const DelayedDiag &left, const DelayedDiag &right) {
1520
    // Although this call will be slow, this is only called when outputting
1521
    // multiple warnings.
1522
2.52k
    return SM.isBeforeInTranslationUnit(left.first.first, right.first.first);
1523
2.52k
  }
1524
};
1525
} // anonymous namespace
1526
} // namespace clang
1527
1528
namespace {
1529
class UninitValsDiagReporter : public UninitVariablesHandler {
1530
  Sema &S;
1531
  typedef SmallVector<UninitUse, 2> UsesVec;
1532
  typedef llvm::PointerIntPair<UsesVec *, 1, bool> MappedType;
1533
  // Prefer using MapVector to DenseMap, so that iteration order will be
1534
  // the same as insertion order. This is needed to obtain a deterministic
1535
  // order of diagnostics when calling flushDiagnostics().
1536
  typedef llvm::MapVector<const VarDecl *, MappedType> UsesMap;
1537
  UsesMap uses;
1538
  UsesMap constRefUses;
1539
1540
public:
1541
53.0k
  UninitValsDiagReporter(Sema &S) : S(S) {}
1542
53.0k
  ~UninitValsDiagReporter() override { flushDiagnostics(); }
1543
1544
2.13k
  MappedType &getUses(UsesMap &um, const VarDecl *vd) {
1545
2.13k
    MappedType &V = um[vd];
1546
2.13k
    if (!V.getPointer())
1547
1.11k
      V.setPointer(new UsesVec());
1548
2.13k
    return V;
1549
2.13k
  }
1550
1551
  void handleUseOfUninitVariable(const VarDecl *vd,
1552
2.09k
                                 const UninitUse &use) override {
1553
2.09k
    getUses(uses, vd).getPointer()->push_back(use);
1554
2.09k
  }
1555
1556
  void handleConstRefUseOfUninitVariable(const VarDecl *vd,
1557
15
                                         const UninitUse &use) override {
1558
15
    getUses(constRefUses, vd).getPointer()->push_back(use);
1559
15
  }
1560
1561
9
  void handleSelfInit(const VarDecl *vd) override {
1562
9
    getUses(uses, vd).setInt(true);
1563
9
    getUses(constRefUses, vd).setInt(true);
1564
9
  }
1565
1566
53.0k
  void flushDiagnostics() {
1567
53.0k
    for (const auto &P : uses) {
1568
1.09k
      const VarDecl *vd = P.first;
1569
1.09k
      const MappedType &V = P.second;
1570
1571
1.09k
      UsesVec *vec = V.getPointer();
1572
1.09k
      bool hasSelfInit = V.getInt();
1573
1574
      // Specially handle the case where we have uses of an uninitialized
1575
      // variable, but the root cause is an idiomatic self-init.  We want
1576
      // to report the diagnostic at the self-init since that is the root cause.
1577
1.09k
      if (!vec->empty() && hasSelfInit && hasAlwaysUninitializedUse(vec))
1578
2
        DiagnoseUninitializedUse(S, vd,
1579
2
                                 UninitUse(vd->getInit()->IgnoreParenCasts(),
1580
2
                                           /* isAlwaysUninit */ true),
1581
2
                                 /* alwaysReportSelfInit */ true);
1582
1.09k
      else {
1583
        // Sort the uses by their SourceLocations.  While not strictly
1584
        // guaranteed to produce them in line/column order, this will provide
1585
        // a stable ordering.
1586
1.09k
        llvm::sort(vec->begin(), vec->end(),
1587
1.09k
                   [](const UninitUse &a, const UninitUse &b) {
1588
          // Prefer a more confident report over a less confident one.
1589
1.00k
          if (a.getKind() != b.getKind())
1590
5
            return a.getKind() > b.getKind();
1591
1.00k
          return a.getUser()->getBeginLoc() < b.getUser()->getBeginLoc();
1592
1.00k
        });
1593
1594
1.09k
        for (const auto &U : *vec) {
1595
          // If we have self-init, downgrade all uses to 'may be uninitialized'.
1596
1.08k
          UninitUse Use = hasSelfInit ? UninitUse(U.getUser(), false) : U;
1597
1598
1.08k
          if (DiagnoseUninitializedUse(S, vd, Use))
1599
            // Skip further diagnostics for this variable. We try to warn only
1600
            // on the first point at which a variable is used uninitialized.
1601
1.08k
            break;
1602
1.08k
        }
1603
1.09k
      }
1604
1605
      // Release the uses vector.
1606
1.09k
      delete vec;
1607
1.09k
    }
1608
1609
53.0k
    uses.clear();
1610
1611
    // Flush all const reference uses diags.
1612
53.0k
    for (const auto &P : constRefUses) {
1613
23
      const VarDecl *vd = P.first;
1614
23
      const MappedType &V = P.second;
1615
1616
23
      UsesVec *vec = V.getPointer();
1617
23
      bool hasSelfInit = V.getInt();
1618
1619
23
      if (!vec->empty() && hasSelfInit && hasAlwaysUninitializedUse(vec))
1620
1
        DiagnoseUninitializedUse(S, vd,
1621
1
                                 UninitUse(vd->getInit()->IgnoreParenCasts(),
1622
1
                                           /* isAlwaysUninit */ true),
1623
1
                                 /* alwaysReportSelfInit */ true);
1624
22
      else {
1625
22
        for (const auto &U : *vec) {
1626
14
          if (DiagnoseUninitializedConstRefUse(S, vd, U))
1627
14
            break;
1628
14
        }
1629
22
      }
1630
1631
      // Release the uses vector.
1632
23
      delete vec;
1633
23
    }
1634
1635
53.0k
    constRefUses.clear();
1636
53.0k
  }
1637
1638
private:
1639
5
  static bool hasAlwaysUninitializedUse(const UsesVec* vec) {
1640
5
    return std::any_of(vec->begin(), vec->end(), [](const UninitUse &U) {
1641
5
      return U.getKind() == UninitUse::Always ||
1642
5
             U.getKind() == UninitUse::AfterCall ||
1643
5
             U.getKind() == UninitUse::AfterDecl;
1644
5
    });
1645
5
  }
1646
};
1647
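UninitValsDiagReporter above buckets uninitialized uses per variable and special-cases the self-initialization idiom: when the self-init is the root cause it reports there, otherwise it downgrades the uses to "may be uninitialized". An illustrative sketch of the two source patterns involved (hypothetical code, not from this file):

// Hypothetical snippets of what the reporter classifies.
int use(int);
void f() {
  int a;        // no initializer
  use(a);       // handleUseOfUninitVariable -> warn_uninit_var

  int b = b;    // idiomatic self-init; handleSelfInit marks it, and the
  use(b);       //   later use is reported at the self-init or downgraded
                //   to a "may be uninitialized" diagnostic.
}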
1648
/// Inter-procedural data for the called-once checker.
1649
class CalledOnceInterProceduralData {
1650
public:
1651
  // Add the delayed warning for the given block.
1652
  void addDelayedWarning(const BlockDecl *Block,
1653
8
                         PartialDiagnosticAt &&Warning) {
1654
8
    DelayedBlockWarnings[Block].emplace_back(std::move(Warning));
1655
8
  }
1656
  // Report all of the warnings we've gathered for the given block.
1657
9
  void flushWarnings(const BlockDecl *Block, Sema &S) {
1658
9
    for (const PartialDiagnosticAt &Delayed : DelayedBlockWarnings[Block])
1659
5
      S.Diag(Delayed.first, Delayed.second);
1660
1661
9
    discardWarnings(Block);
1662
9
  }
1663
  // Discard all of the warnings we've gathered for the given block.
1664
16
  void discardWarnings(const BlockDecl *Block) {
1665
16
    DelayedBlockWarnings.erase(Block);
1666
16
  }
1667
1668
private:
1669
  using DelayedDiagnostics = SmallVector<PartialDiagnosticAt, 2>;
1670
  llvm::DenseMap<const BlockDecl *, DelayedDiagnostics> DelayedBlockWarnings;
1671
};
1672
1673
class CalledOnceCheckReporter : public CalledOnceCheckHandler {
1674
public:
1675
  CalledOnceCheckReporter(Sema &S, CalledOnceInterProceduralData &Data)
1676
10.2k
      : S(S), Data(Data) {}
1677
  void handleDoubleCall(const ParmVarDecl *Parameter, const Expr *Call,
1678
                        const Expr *PrevCall, bool IsCompletionHandler,
1679
29
                        bool Poised) override {
1680
29
    auto DiagToReport = IsCompletionHandler
1681
29
                            ? diag::warn_completion_handler_called_twice
1682
29
                            : diag::warn_called_once_gets_called_twice;
1683
29
    S.Diag(Call->getBeginLoc(), DiagToReport) << Parameter;
1684
29
    S.Diag(PrevCall->getBeginLoc(), diag::note_called_once_gets_called_twice)
1685
29
        << Poised;
1686
29
  }
1687
1688
  void handleNeverCalled(const ParmVarDecl *Parameter,
1689
10
                         bool IsCompletionHandler) override {
1690
10
    auto DiagToReport = IsCompletionHandler
1691
10
                            ? diag::warn_completion_handler_never_called
1692
10
                            : diag::warn_called_once_never_called;
1693
10
    S.Diag(Parameter->getBeginLoc(), DiagToReport)
1694
10
        << Parameter << /* Captured */ false;
1695
10
  }
1696
1697
  void handleNeverCalled(const ParmVarDecl *Parameter, const Decl *Function,
1698
                         const Stmt *Where, NeverCalledReason Reason,
1699
                         bool IsCalledDirectly,
1700
43
                         bool IsCompletionHandler) override {
1701
43
    auto DiagToReport = IsCompletionHandler
1702
43
                            ? diag::warn_completion_handler_never_called_when
1703
43
                            : diag::warn_called_once_never_called_when;
1704
43
    PartialDiagnosticAt Warning(Where->getBeginLoc(), S.PDiag(DiagToReport)
1705
43
                                                          << Parameter
1706
43
                                                          << IsCalledDirectly
1707
43
                                                          << (unsigned)Reason);
1708
1709
43
    if (const auto *Block = dyn_cast<BlockDecl>(Function)) {
1710
      // We shouldn't report these warnings on blocks immediately
1711
8
      Data.addDelayedWarning(Block, std::move(Warning));
1712
35
    } else {
1713
35
      S.Diag(Warning.first, Warning.second);
1714
35
    }
1715
43
  }
1716
1717
  void handleCapturedNeverCalled(const ParmVarDecl *Parameter,
1718
                                 const Decl *Where,
1719
0
                                 bool IsCompletionHandler) override {
1720
0
    auto DiagToReport = IsCompletionHandler
1721
0
                            ? diag::warn_completion_handler_never_called
1722
0
                            : diag::warn_called_once_never_called;
1723
0
    S.Diag(Where->getBeginLoc(), DiagToReport)
1724
0
        << Parameter << /* Captured */ true;
1725
0
  }
1726
1727
  void
1728
9
  handleBlockThatIsGuaranteedToBeCalledOnce(const BlockDecl *Block) override {
1729
9
    Data.flushWarnings(Block, S);
1730
9
  }
1731
1732
7
  void handleBlockWithNoGuarantees(const BlockDecl *Block) override {
1733
7
    Data.discardWarnings(Block);
1734
7
  }
1735
1736
private:
1737
  Sema &S;
1738
  CalledOnceInterProceduralData &Data;
1739
};
1740
1741
constexpr unsigned CalledOnceWarnings[] = {
1742
    diag::warn_called_once_never_called,
1743
    diag::warn_called_once_never_called_when,
1744
    diag::warn_called_once_gets_called_twice};
1745
1746
constexpr unsigned CompletionHandlerWarnings[]{
1747
    diag::warn_completion_handler_never_called,
1748
    diag::warn_completion_handler_never_called_when,
1749
    diag::warn_completion_handler_called_twice};
1750
1751
bool shouldAnalyzeCalledOnceImpl(llvm::ArrayRef<unsigned> DiagIDs,
1752
                                 const DiagnosticsEngine &Diags,
1753
20.7k
                                 SourceLocation At) {
1754
40.9k
  return llvm::any_of(DiagIDs, [&Diags, At](unsigned DiagID) {
1755
40.9k
    return !Diags.isIgnored(DiagID, At);
1756
40.9k
  });
1757
20.7k
}
1758
1759
bool shouldAnalyzeCalledOnceConventions(const DiagnosticsEngine &Diags,
1760
10.2k
                                        SourceLocation At) {
1761
10.2k
  return shouldAnalyzeCalledOnceImpl(CompletionHandlerWarnings, Diags, At);
1762
10.2k
}
1763
1764
bool shouldAnalyzeCalledOnceParameters(const DiagnosticsEngine &Diags,
1765
10.4k
                                       SourceLocation At) {
1766
10.4k
  return shouldAnalyzeCalledOnceImpl(CalledOnceWarnings, Diags, At) ||
1767
10.4k
         shouldAnalyzeCalledOnceConventions(Diags, At);
1768
10.4k
}
1769
} // anonymous namespace
1770
1771
//===----------------------------------------------------------------------===//
1772
// -Wthread-safety
1773
//===----------------------------------------------------------------------===//
1774
namespace clang {
1775
namespace threadSafety {
1776
namespace {
1777
class ThreadSafetyReporter : public clang::threadSafety::ThreadSafetyHandler {
1778
  Sema &S;
1779
  DiagList Warnings;
1780
  SourceLocation FunLocation, FunEndLocation;
1781
1782
  const FunctionDecl *CurrentFunction;
1783
  bool Verbose;
1784
1785
2.46k
  OptionalNotes getNotes() const {
1786
2.46k
    if (Verbose && CurrentFunction) {
1787
14
      PartialDiagnosticAt FNote(CurrentFunction->getBody()->getBeginLoc(),
1788
14
                                S.PDiag(diag::note_thread_warning_in_fun)
1789
14
                                    << CurrentFunction);
1790
14
      return OptionalNotes(1, FNote);
1791
14
    }
1792
2.45k
    return OptionalNotes();
1793
2.46k
  }
1794
1795
434
  OptionalNotes getNotes(const PartialDiagnosticAt &Note) const {
1796
434
    OptionalNotes ONS(1, Note);
1797
434
    if (Verbose && CurrentFunction) {
1798
6
      PartialDiagnosticAt FNote(CurrentFunction->getBody()->getBeginLoc(),
1799
6
                                S.PDiag(diag::note_thread_warning_in_fun)
1800
6
                                    << CurrentFunction);
1801
6
      ONS.push_back(std::move(FNote));
1802
6
    }
1803
434
    return ONS;
1804
434
  }
1805
1806
  OptionalNotes getNotes(const PartialDiagnosticAt &Note1,
1807
0
                         const PartialDiagnosticAt &Note2) const {
1808
0
    OptionalNotes ONS;
1809
0
    ONS.push_back(Note1);
1810
0
    ONS.push_back(Note2);
1811
0
    if (Verbose && CurrentFunction) {
1812
0
      PartialDiagnosticAt FNote(CurrentFunction->getBody()->getBeginLoc(),
1813
0
                                S.PDiag(diag::note_thread_warning_in_fun)
1814
0
                                    << CurrentFunction);
1815
0
      ONS.push_back(std::move(FNote));
1816
0
    }
1817
0
    return ONS;
1818
0
  }
1819
1820
270
  OptionalNotes makeLockedHereNote(SourceLocation LocLocked, StringRef Kind) {
1821
270
    return LocLocked.isValid()
1822
270
               ? getNotes(PartialDiagnosticAt(
1823
270
                     LocLocked, S.PDiag(diag::note_locked_here) << Kind))
1824
270
               : getNotes();
1825
270
  }
1826
1827
  OptionalNotes makeUnlockedHereNote(SourceLocation LocUnlocked,
1828
106
                                     StringRef Kind) {
1829
106
    return LocUnlocked.isValid()
1830
106
               ? getNotes(PartialDiagnosticAt(
1831
54
                     LocUnlocked, S.PDiag(diag::note_unlocked_here) << Kind))
1832
106
               : getNotes();
1833
106
  }
1834
1835
 public:
1836
  ThreadSafetyReporter(Sema &S, SourceLocation FL, SourceLocation FEL)
1837
    : S(S), FunLocation(FL), FunEndLocation(FEL),
1838
2.21k
      CurrentFunction(nullptr), Verbose(false) {}
1839
1840
41
  void setVerbose(bool b) { Verbose = b; }
1841
1842
  /// Emit all buffered diagnostics in order of sourcelocation.
1843
  /// We need to output diagnostics produced while iterating through
1844
  /// the lockset in deterministic order, so this function orders diagnostics
1845
  /// and outputs them.
1846
2.21k
  void emitDiagnostics() {
1847
2.21k
    Warnings.sort(SortDiagBySourceLocation(S.getSourceManager()));
1848
2.90k
    for (const auto &Diag : Warnings) {
1849
2.90k
      S.Diag(Diag.first.first, Diag.first.second);
1850
2.90k
      for (const auto &Note : Diag.second)
1851
454
        S.Diag(Note.first, Note.second);
1852
2.90k
    }
1853
2.21k
  }
1854
1855
0
  void handleInvalidLockExp(StringRef Kind, SourceLocation Loc) override {
1856
0
    PartialDiagnosticAt Warning(Loc, S.PDiag(diag::warn_cannot_resolve_lock)
1857
0
                                         << Loc);
1858
0
    Warnings.emplace_back(std::move(Warning), getNotes());
1859
0
  }
1860
1861
  void handleUnmatchedUnlock(StringRef Kind, Name LockName, SourceLocation Loc,
1862
106
                             SourceLocation LocPreviousUnlock) override {
1863
106
    if (Loc.isInvalid())
1864
0
      Loc = FunLocation;
1865
106
    PartialDiagnosticAt Warning(Loc, S.PDiag(diag::warn_unlock_but_no_lock)
1866
106
                                         << Kind << LockName);
1867
106
    Warnings.emplace_back(std::move(Warning),
1868
106
                          makeUnlockedHereNote(LocPreviousUnlock, Kind));
1869
106
  }
1870
1871
  void handleIncorrectUnlockKind(StringRef Kind, Name LockName,
1872
                                 LockKind Expected, LockKind Received,
1873
                                 SourceLocation LocLocked,
1874
18
                                 SourceLocation LocUnlock) override {
1875
18
    if (LocUnlock.isInvalid())
1876
0
      LocUnlock = FunLocation;
1877
18
    PartialDiagnosticAt Warning(
1878
18
        LocUnlock, S.PDiag(diag::warn_unlock_kind_mismatch)
1879
18
                       << Kind << LockName << Received << Expected);
1880
18
    Warnings.emplace_back(std::move(Warning),
1881
18
                          makeLockedHereNote(LocLocked, Kind));
1882
18
  }
1883
1884
  void handleDoubleLock(StringRef Kind, Name LockName, SourceLocation LocLocked,
1885
79
                        SourceLocation LocDoubleLock) override {
1886
79
    if (LocDoubleLock.isInvalid())
1887
0
      LocDoubleLock = FunLocation;
1888
79
    PartialDiagnosticAt Warning(LocDoubleLock, S.PDiag(diag::warn_double_lock)
1889
79
                                                   << Kind << LockName);
1890
79
    Warnings.emplace_back(std::move(Warning),
1891
79
                          makeLockedHereNote(LocLocked, Kind));
1892
79
  }
1893
1894
  void handleMutexHeldEndOfScope(StringRef Kind, Name LockName,
1895
                                 SourceLocation LocLocked,
1896
                                 SourceLocation LocEndOfScope,
1897
173
                                 LockErrorKind LEK) override {
1898
173
    unsigned DiagID = 0;
1899
173
    switch (LEK) {
1900
42
      case LEK_LockedSomePredecessors:
1901
42
        DiagID = diag::warn_lock_some_predecessors;
1902
42
        break;
1903
34
      case LEK_LockedSomeLoopIterations:
1904
34
        DiagID = diag::warn_expecting_lock_held_on_loop;
1905
34
        break;
1906
65
      case LEK_LockedAtEndOfFunction:
1907
65
        DiagID = diag::warn_no_unlock;
1908
65
        break;
1909
32
      case LEK_NotLockedAtEndOfFunction:
1910
32
        DiagID = diag::warn_expecting_locked;
1911
32
        break;
1912
173
    }
1913
173
    if (LocEndOfScope.isInvalid())
1914
127
      LocEndOfScope = FunEndLocation;
1915
1916
173
    PartialDiagnosticAt Warning(LocEndOfScope, S.PDiag(DiagID) << Kind
1917
173
                                                               << LockName);
1918
173
    Warnings.emplace_back(std::move(Warning),
1919
173
                          makeLockedHereNote(LocLocked, Kind));
1920
173
  }
1921
1922
  void handleExclusiveAndShared(StringRef Kind, Name LockName,
1923
                                SourceLocation Loc1,
1924
28
                                SourceLocation Loc2) override {
1925
28
    PartialDiagnosticAt Warning(Loc1,
1926
28
                                S.PDiag(diag::warn_lock_exclusive_and_shared)
1927
28
                                    << Kind << LockName);
1928
28
    PartialDiagnosticAt Note(Loc2, S.PDiag(diag::note_lock_exclusive_and_shared)
1929
28
                                       << Kind << LockName);
1930
28
    Warnings.emplace_back(std::move(Warning), getNotes(Note));
1931
28
  }
1932
1933
  void handleNoMutexHeld(StringRef Kind, const NamedDecl *D,
1934
                         ProtectedOperationKind POK, AccessKind AK,
1935
34
                         SourceLocation Loc) override {
1936
34
    assert((POK == POK_VarAccess || POK == POK_VarDereference) &&
1937
34
           "Only works for variables");
1938
34
    unsigned DiagID = POK == POK_VarAccess?
1939
25
                        diag::warn_variable_requires_any_lock:
1940
34
                        diag::warn_var_deref_requires_any_lock;
1941
34
    PartialDiagnosticAt Warning(Loc, S.PDiag(DiagID)
1942
34
      << D << getLockKindFromAccessKind(AK));
1943
34
    Warnings.emplace_back(std::move(Warning), getNotes());
1944
34
  }
1945
1946
  void handleMutexNotHeld(StringRef Kind, const NamedDecl *D,
1947
                          ProtectedOperationKind POK, Name LockName,
1948
                          LockKind LK, SourceLocation Loc,
1949
1.21k
                          Name *PossibleMatch) override {
1950
1.21k
    unsigned DiagID = 0;
1951
1.21k
    if (PossibleMatch) {
1952
76
      switch (POK) {
1953
56
        case POK_VarAccess:
1954
56
          DiagID = diag::warn_variable_requires_lock_precise;
1955
56
          break;
1956
0
        case POK_VarDereference:
1957
0
          DiagID = diag::warn_var_deref_requires_lock_precise;
1958
0
          break;
1959
20
        case POK_FunctionCall:
1960
20
          DiagID = diag::warn_fun_requires_lock_precise;
1961
20
          break;
1962
0
        case POK_PassByRef:
1963
0
          DiagID = diag::warn_guarded_pass_by_reference;
1964
0
          break;
1965
0
        case POK_PtPassByRef:
1966
0
          DiagID = diag::warn_pt_guarded_pass_by_reference;
1967
0
          break;
1968
76
      }
1969
76
      PartialDiagnosticAt Warning(Loc, S.PDiag(DiagID) << Kind
1970
76
                                                       << D
1971
76
                                                       << LockName << LK);
1972
76
      PartialDiagnosticAt Note(Loc, S.PDiag(diag::note_found_mutex_near_match)
1973
76
                                        << *PossibleMatch);
1974
76
      if (Verbose && POK == POK_VarAccess) {
1975
0
        PartialDiagnosticAt VNote(D->getLocation(),
1976
0
                                  S.PDiag(diag::note_guarded_by_declared_here)
1977
0
                                      << D->getDeclName());
1978
0
        Warnings.emplace_back(std::move(Warning), getNotes(Note, VNote));
1979
0
      } else
1980
76
        Warnings.emplace_back(std::move(Warning), getNotes(Note));
1981
1.13k
    } else {
1982
1.13k
      switch (POK) {
1983
618
        case POK_VarAccess:
1984
618
          DiagID = diag::warn_variable_requires_lock;
1985
618
          break;
1986
204
        case POK_VarDereference:
1987
204
          DiagID = diag::warn_var_deref_requires_lock;
1988
204
          break;
1989
206
        case POK_FunctionCall:
1990
206
          DiagID = diag::warn_fun_requires_lock;
1991
206
          break;
1992
88
        case POK_PassByRef:
1993
88
          DiagID = diag::warn_guarded_pass_by_reference;
1994
88
          break;
1995
20
        case POK_PtPassByRef:
1996
20
          DiagID = diag::warn_pt_guarded_pass_by_reference;
1997
20
          break;
1998
1.13k
      }
1999
1.13k
      PartialDiagnosticAt Warning(Loc, S.PDiag(DiagID) << Kind
2000
1.13k
                                                       << D
2001
1.13k
                                                       << LockName << LK);
2002
1.13k
      if (Verbose && POK == POK_VarAccess) {
2003
6
        PartialDiagnosticAt Note(D->getLocation(),
2004
6
                                 S.PDiag(diag::note_guarded_by_declared_here));
2005
6
        Warnings.emplace_back(std::move(Warning), getNotes(Note));
2006
6
      } else
2007
1.13k
        Warnings.emplace_back(std::move(Warning), getNotes());
2008
1.13k
    }
2009
1.21k
  }
2010
2011
  void handleNegativeNotHeld(StringRef Kind, Name LockName, Name Neg,
2012
1.09k
                             SourceLocation Loc) override {
2013
1.09k
    PartialDiagnosticAt Warning(Loc,
2014
1.09k
        S.PDiag(diag::warn_acquire_requires_negative_cap)
2015
1.09k
        << Kind << LockName << Neg);
2016
1.09k
    Warnings.emplace_back(std::move(Warning), getNotes());
2017
1.09k
  }
2018
2019
  void handleNegativeNotHeld(const NamedDecl *D, Name LockName,
2020
14
                             SourceLocation Loc) override {
2021
14
    PartialDiagnosticAt Warning(
2022
14
        Loc, S.PDiag(diag::warn_fun_requires_negative_cap) << D << LockName);
2023
14
    Warnings.emplace_back(std::move(Warning), getNotes());
2024
14
  }
2025
2026
  void handleFunExcludesLock(StringRef Kind, Name FunName, Name LockName,
2027
77
                             SourceLocation Loc) override {
2028
77
    PartialDiagnosticAt Warning(Loc, S.PDiag(diag::warn_fun_excludes_mutex)
2029
77
                                         << Kind << FunName << LockName);
2030
77
    Warnings.emplace_back(std::move(Warning), getNotes());
2031
77
  }
2032
2033
  void handleLockAcquiredBefore(StringRef Kind, Name L1Name, Name L2Name,
2034
49
                                SourceLocation Loc) override {
2035
49
    PartialDiagnosticAt Warning(Loc,
2036
49
      S.PDiag(diag::warn_acquired_before) << Kind << L1Name << L2Name);
2037
49
    Warnings.emplace_back(std::move(Warning), getNotes());
2038
49
  }
2039
2040
20
  void handleBeforeAfterCycle(Name L1Name, SourceLocation Loc) override {
2041
20
    PartialDiagnosticAt Warning(Loc,
2042
20
      S.PDiag(diag::warn_acquired_before_after_cycle) << L1Name);
2043
20
    Warnings.emplace_back(std::move(Warning), getNotes());
2044
20
  }
2045
2046
2.09k
  void enterFunction(const FunctionDecl* FD) override {
2047
2.09k
    CurrentFunction = FD;
2048
2.09k
  }
2049
2050
2.03k
  void leaveFunction(const FunctionDecl* FD) override {
2051
2.03k
    CurrentFunction = nullptr;
2052
2.03k
  }
2053
};
2054
} // anonymous namespace
2055
} // namespace threadSafety
2056
} // namespace clang
2057
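ThreadSafetyReporter above buffers each warning as a PartialDiagnosticAt, sorts by source location, and only then emits, so output stays deterministic. A minimal sketch of code that would reach handleDoubleLock, using the capability attributes the analysis consumes (hypothetical example with assumed names Mutex, Mu, Data; the attribute spellings are the documented thread-safety annotations, not anything defined in this file):

// Hypothetical -Wthread-safety trigger for warn_double_lock.
struct __attribute__((capability("mutex"))) Mutex {
  void lock() __attribute__((acquire_capability()));
  void unlock() __attribute__((release_capability()));
};

Mutex Mu;
int Data __attribute__((guarded_by(Mu)));

void doubleLock() {
  Mu.lock();
  Mu.lock();   // handleDoubleLock: acquiring a capability that is already held
  Data = 1;
  Mu.unlock();
}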
2058
//===----------------------------------------------------------------------===//
2059
// -Wconsumed
2060
//===----------------------------------------------------------------------===//
2061
2062
namespace clang {
2063
namespace consumed {
2064
namespace {
2065
class ConsumedWarningsHandler : public ConsumedWarningsHandlerBase {
2066
2067
  Sema &S;
2068
  DiagList Warnings;
2069
2070
public:
2071
2072
97
  ConsumedWarningsHandler(Sema &S) : S(S) {}
2073
2074
94
  void emitDiagnostics() override {
2075
94
    Warnings.sort(SortDiagBySourceLocation(S.getSourceManager()));
2076
110
    for (const auto &Diag : Warnings) {
2077
110
      S.Diag(Diag.first.first, Diag.first.second);
2078
110
      for (const auto &Note : Diag.second)
2079
0
        S.Diag(Note.first, Note.second);
2080
110
    }
2081
94
  }
2082
2083
  void warnLoopStateMismatch(SourceLocation Loc,
2084
2
                             StringRef VariableName) override {
2085
2
    PartialDiagnosticAt Warning(Loc, S.PDiag(diag::warn_loop_state_mismatch) <<
2086
2
      VariableName);
2087
2088
2
    Warnings.emplace_back(std::move(Warning), OptionalNotes());
2089
2
  }
2090
2091
  void warnParamReturnTypestateMismatch(SourceLocation Loc,
2092
                                        StringRef VariableName,
2093
                                        StringRef ExpectedState,
2094
2
                                        StringRef ObservedState) override {
2095
2096
2
    PartialDiagnosticAt Warning(Loc, S.PDiag(
2097
2
      diag::warn_param_return_typestate_mismatch) << VariableName <<
2098
2
        ExpectedState << ObservedState);
2099
2100
2
    Warnings.emplace_back(std::move(Warning), OptionalNotes());
2101
2
  }
2102
2103
  void warnParamTypestateMismatch(SourceLocation Loc, StringRef ExpectedState,
2104
6
                                  StringRef ObservedState) override {
2105
2106
6
    PartialDiagnosticAt Warning(Loc, S.PDiag(
2107
6
      diag::warn_param_typestate_mismatch) << ExpectedState << ObservedState);
2108
2109
6
    Warnings.emplace_back(std::move(Warning), OptionalNotes());
2110
6
  }
2111
2112
  void warnReturnTypestateForUnconsumableType(SourceLocation Loc,
2113
1
                                              StringRef TypeName) override {
2114
1
    PartialDiagnosticAt Warning(Loc, S.PDiag(
2115
1
      diag::warn_return_typestate_for_unconsumable_type) << TypeName);
2116
2117
1
    Warnings.emplace_back(std::move(Warning), OptionalNotes());
2118
1
  }
2119
2120
  void warnReturnTypestateMismatch(SourceLocation Loc, StringRef ExpectedState,
2121
1
                                   StringRef ObservedState) override {
2122
2123
1
    PartialDiagnosticAt Warning(Loc, S.PDiag(
2124
1
      diag::warn_return_typestate_mismatch) << ExpectedState << ObservedState);
2125
2126
1
    Warnings.emplace_back(std::move(Warning), OptionalNotes());
2127
1
  }
2128
2129
  void warnUseOfTempInInvalidState(StringRef MethodName, StringRef State,
2130
4
                                   SourceLocation Loc) override {
2131
2132
4
    PartialDiagnosticAt Warning(Loc, S.PDiag(
2133
4
      diag::warn_use_of_temp_in_invalid_state) << MethodName << State);
2134
2135
4
    Warnings.emplace_back(std::move(Warning), OptionalNotes());
2136
4
  }
2137
2138
  void warnUseInInvalidState(StringRef MethodName, StringRef VariableName,
2139
94
                             StringRef State, SourceLocation Loc) override {
2140
2141
94
    PartialDiagnosticAt Warning(Loc, S.PDiag(diag::warn_use_in_invalid_state) <<
2142
94
                                MethodName << VariableName << State);
2143
2144
94
    Warnings.emplace_back(std::move(Warning), OptionalNotes());
2145
94
  }
2146
};
2147
} // anonymous namespace
2148
} // namespace consumed
2149
} // namespace clang
2150
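ConsumedWarningsHandler mirrors the same buffer-and-sort pattern for the -Wconsumed typestate checks. A small, hedged sketch of an annotated type that could trigger warnUseInInvalidState (hypothetical class File and methods; attribute spellings assumed from the consumed-annotation documentation):

// Hypothetical -Wconsumed trigger, for illustration only.
class [[clang::consumable(unconsumed)]] File {
public:
  [[clang::return_typestate(unconsumed)]] File();
  [[clang::callable_when("unconsumed")]] void read();
  [[clang::set_typestate(consumed)]] void close();
};

void useAfterClose() {
  File F;
  F.close();
  F.read();   // warnUseInInvalidState: 'read' called while 'F' is consumed
}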
2151
//===----------------------------------------------------------------------===//
2152
// AnalysisBasedWarnings - Worker object used by Sema to execute analysis-based
2153
//  warnings on a function, method, or block.
2154
//===----------------------------------------------------------------------===//
2155
2156
93.3k
sema::AnalysisBasedWarnings::Policy::Policy() {
2157
93.3k
  enableCheckFallThrough = 1;
2158
93.3k
  enableCheckUnreachable = 0;
2159
93.3k
  enableThreadSafetyAnalysis = 0;
2160
93.3k
  enableConsumedAnalysis = 0;
2161
93.3k
}
2162
2163
/// InterProceduralData aims to be a storage of whatever data should be passed
2164
/// between analyses of different functions.
2165
///
2166
/// At the moment, its primary goal is to make the information gathered during
2167
/// the analysis of the blocks available during the analysis of the enclosing
2168
/// function.  This is important due to the fact that blocks are analyzed before
2169
/// the enclosing function is even parsed fully, so it is not viable to access
2170
/// anything in the outer scope while analyzing the block.  On the other hand,
2171
/// re-building CFG for blocks and re-analyzing them when we do have all the
2172
/// information (i.e. during the analysis of the enclosing function) seems to be
2173
/// ill-designed.
2174
class sema::AnalysisBasedWarnings::InterProceduralData {
2175
public:
2176
  // It is important to analyze blocks within functions because it's a very
2177
  // common pattern to capture completion handler parameters by blocks.
2178
  CalledOnceInterProceduralData CalledOnceData;
2179
};
2180
2181
559k
static unsigned isEnabled(DiagnosticsEngine &D, unsigned diag) {
2182
559k
  return (unsigned)!D.isIgnored(diag, SourceLocation());
2183
559k
}
2184
2185
sema::AnalysisBasedWarnings::AnalysisBasedWarnings(Sema &s)
2186
    : S(s), IPData(std::make_unique<InterProceduralData>()),
2187
      NumFunctionsAnalyzed(0), NumFunctionsWithBadCFGs(0), NumCFGBlocks(0),
2188
      MaxCFGBlocksPerFunction(0), NumUninitAnalysisFunctions(0),
2189
      NumUninitAnalysisVariables(0), MaxUninitAnalysisVariablesPerFunction(0),
2190
      NumUninitAnalysisBlockVisits(0),
2191
93.3k
      MaxUninitAnalysisBlockVisitsPerFunction(0) {
2192
2193
93.3k
  using namespace diag;
2194
93.3k
  DiagnosticsEngine &D = S.getDiagnostics();
2195
2196
93.3k
  DefaultPolicy.enableCheckUnreachable =
2197
93.3k
      isEnabled(D, warn_unreachable) || isEnabled(D, warn_unreachable_break) ||
2198
93.3k
      isEnabled(D, warn_unreachable_return) ||
2199
93.3k
      isEnabled(D, warn_unreachable_loop_increment);
2200
2201
93.3k
  DefaultPolicy.enableThreadSafetyAnalysis = isEnabled(D, warn_double_lock);
2202
2203
93.3k
  DefaultPolicy.enableConsumedAnalysis =
2204
93.3k
      isEnabled(D, warn_use_in_invalid_state);
2205
93.3k
}
2206
2207
// We need this here for unique_ptr with forward declared class.
2208
88.7k
sema::AnalysisBasedWarnings::~AnalysisBasedWarnings() = default;
2209
2210
595
static void flushDiagnostics(Sema &S, const sema::FunctionScopeInfo *fscope) {
2211
595
  for (const auto &D : fscope->PossiblyUnreachableDiags)
2212
0
    S.Diag(D.Loc, D.PD);
2213
595
}
2214
2215
void clang::sema::AnalysisBasedWarnings::IssueWarnings(
2216
    sema::AnalysisBasedWarnings::Policy P, sema::FunctionScopeInfo *fscope,
2217
3.13M
    const Decl *D, QualType BlockType) {
2218
2219
  // We avoid doing analysis-based warnings when there are errors for
2220
  // two reasons:
2221
  // (1) The CFGs often can't be constructed (if the body is invalid), so
2222
  //     don't bother trying.
2223
  // (2) The code already has problems; running the analysis just takes more
2224
  //     time.
2225
3.13M
  DiagnosticsEngine &Diags = S.getDiagnostics();
2226
2227
  // Do not do any analysis if we are going to just ignore them.
2228
3.13M
  if (Diags.getIgnoreAllWarnings() ||
2229
3.13M
      (Diags.getSuppressSystemWarnings() &&
2230
3.05M
       S.SourceMgr.isInSystemHeader(D->getLocation())))
2231
2.83M
    return;
2232
2233
  // For code in dependent contexts, we'll do this at instantiation time.
2234
296k
  if (cast<DeclContext>(D)->isDependentContext())
2235
10.5k
    return;
2236
2237
286k
  if (S.hasUncompilableErrorOccurred()) {
2238
    // Flush out any possibly unreachable diagnostics.
2239
595
    flushDiagnostics(S, fscope);
2240
595
    return;
2241
595
  }
2242
2243
285k
  const Stmt *Body = D->getBody();
2244
285k
  assert(Body);
2245
2246
  // Construct the analysis context with the specified CFG build options.
2247
0
  AnalysisDeclContext AC(/* AnalysisDeclContextManager */ nullptr, D);
2248
2249
  // Don't generate EH edges for CallExprs as we'd like to avoid the n^2
2250
  // explosion for destructors that can result and the compile time hit.
2251
285k
  AC.getCFGBuildOptions().PruneTriviallyFalseEdges = true;
2252
285k
  AC.getCFGBuildOptions().AddEHEdges = false;
2253
285k
  AC.getCFGBuildOptions().AddInitializers = true;
2254
285k
  AC.getCFGBuildOptions().AddImplicitDtors = true;
2255
285k
  AC.getCFGBuildOptions().AddTemporaryDtors = true;
2256
285k
  AC.getCFGBuildOptions().AddCXXNewAllocator = false;
2257
285k
  AC.getCFGBuildOptions().AddCXXDefaultInitExprInCtors = true;
2258
2259
  // Force that certain expressions appear as CFGElements in the CFG.  This
2260
  // is used to speed up various analyses.
2261
  // FIXME: This isn't the right factoring.  This is here for initial
2262
  // prototyping, but we need a way for analyses to say what expressions they
2263
  // expect to always be CFGElements and then fill in the BuildOptions
2264
  // appropriately.  This is essentially a layering violation.
2265
285k
  if (P.enableCheckUnreachable || P.enableThreadSafetyAnalysis ||
2266
285k
      P.enableConsumedAnalysis) {
2267
    // Unreachable code analysis and thread safety require a linearized CFG.
2268
2.42k
    AC.getCFGBuildOptions().setAllAlwaysAdd();
2269
2.42k
  }
2270
283k
  else {
2271
283k
    AC.getCFGBuildOptions()
2272
283k
      .setAlwaysAdd(Stmt::BinaryOperatorClass)
2273
283k
      .setAlwaysAdd(Stmt::CompoundAssignOperatorClass)
2274
283k
      .setAlwaysAdd(Stmt::BlockExprClass)
2275
283k
      .setAlwaysAdd(Stmt::CStyleCastExprClass)
2276
283k
      .setAlwaysAdd(Stmt::DeclRefExprClass)
2277
283k
      .setAlwaysAdd(Stmt::ImplicitCastExprClass)
2278
283k
      .setAlwaysAdd(Stmt::UnaryOperatorClass)
2279
283k
      .setAlwaysAdd(Stmt::AttributedStmtClass);
2280
283k
  }
2281
2282
  // Install the logical handler.
2283
285k
  llvm::Optional<LogicalErrorHandler> LEH;
2284
285k
  if (LogicalErrorHandler::hasActiveDiagnostics(Diags, D->getBeginLoc())) {
2285
38.8k
    LEH.emplace(S);
2286
38.8k
    AC.getCFGBuildOptions().Observer = &*LEH;
2287
38.8k
  }
2288
2289
  // Emit delayed diagnostics.
2290
285k
  if (!fscope->PossiblyUnreachableDiags.empty()) {
2291
3.17k
    bool analyzed = false;
2292
2293
    // Register the expressions with the CFGBuilder.
2294
8.76k
    for (const auto &D : fscope->PossiblyUnreachableDiags) {
2295
8.76k
      for (const Stmt *S : D.Stmts)
2296
9.07k
        AC.registerForcedBlockExpression(S);
2297
8.76k
    }
2298
2299
3.17k
    if (AC.getCFG()) {
2300
3.17k
      analyzed = true;
2301
8.76k
      for (const auto &D : fscope->PossiblyUnreachableDiags) {
2302
8.76k
        bool AllReachable = true;
2303
9.07k
        for (const Stmt *S : D.Stmts) {
2304
9.07k
          const CFGBlock *block = AC.getBlockForRegisteredExpression(S);
2305
9.07k
          CFGReverseBlockReachabilityAnalysis *cra =
2306
9.07k
              AC.getCFGReachablityAnalysis();
2307
          // FIXME: We should be able to assert that block is non-null, but
2308
          // the CFG analysis can skip potentially-evaluated expressions in
2309
          // edge cases; see test/Sema/vla-2.c.
2310
9.07k
          if (block && cra) {
2311
            // Can this block be reached from the entrance?
2312
3.27k
            if (!cra->isReachable(&AC.getCFG()->getEntry(), block)) {
2313
67
              AllReachable = false;
2314
67
              break;
2315
67
            }
2316
3.27k
          }
2317
          // If we cannot map to a basic block, assume the statement is
2318
          // reachable.
2319
9.07k
        }
2320
2321
8.76k
        if (AllReachable)
2322
8.69k
          S.Diag(D.Loc, D.PD);
2323
8.76k
      }
2324
3.17k
    }
2325
2326
3.17k
    if (!analyzed)
2327
0
      flushDiagnostics(S, fscope);
2328
3.17k
  }
2329
2330
  // Warning: check missing 'return'
2331
285k
  if (P.enableCheckFallThrough) {
2332
279k
    const CheckFallThroughDiagnostics &CD =
2333
279k
        (isa<BlockDecl>(D)
2334
279k
             ? CheckFallThroughDiagnostics::MakeForBlock()
2335
279k
             : (isa<CXXMethodDecl>(D) &&
2336
277k
                cast<CXXMethodDecl>(D)->getOverloadedOperator() == OO_Call &&
2337
277k
                cast<CXXMethodDecl>(D)->getParent()->isLambda())
2338
277k
                   ? CheckFallThroughDiagnostics::MakeForLambda()
2339
277k
                   : (fscope->isCoroutine()
2340
273k
                          ? CheckFallThroughDiagnostics::MakeForCoroutine(D)
2341
273k
                          : CheckFallThroughDiagnostics::MakeForFunction(D)));
2342
279k
    CheckFallThroughForBody(S, D, Body, BlockType, CD, AC, fscope);
2343
279k
  }
2344
2345
  // Warning: check for unreachable code
2346
285k
  if (P.enableCheckUnreachable) {
2347
    // Only check for unreachable code on non-template instantiations.
2348
    // Different template instantiations can effectively change the control-flow
2349
    // and it is very difficult to prove that a snippet of code in a template
2350
    // is unreachable for all instantiations.
2351
163
    bool isTemplateInstantiation = false;
2352
163
    if (const FunctionDecl *Function = dyn_cast<FunctionDecl>(D))
2353
159
      isTemplateInstantiation = Function->isTemplateInstantiation();
2354
163
    if (!isTemplateInstantiation)
2355
159
      CheckUnreachable(S, AC);
2356
163
  }
2357
2358
  // Check for thread safety violations
2359
285k
  if (P.enableThreadSafetyAnalysis) {
2360
2.21k
    SourceLocation FL = AC.getDecl()->getLocation();
2361
2.21k
    SourceLocation FEL = AC.getDecl()->getEndLoc();
2362
2.21k
    threadSafety::ThreadSafetyReporter Reporter(S, FL, FEL);
2363
2.21k
    if (!Diags.isIgnored(diag::warn_thread_safety_beta, D->getBeginLoc()))
2364
2.14k
      Reporter.setIssueBetaWarnings(true);
2365
2.21k
    if (!Diags.isIgnored(diag::warn_thread_safety_verbose, D->getBeginLoc()))
2366
41
      Reporter.setVerbose(true);
2367
2368
2.21k
    threadSafety::runThreadSafetyAnalysis(AC, Reporter,
2369
2.21k
                                          &S.ThreadSafetyDeclCache);
2370
2.21k
    Reporter.emitDiagnostics();
2371
2.21k
  }
2372
2373
  // Check for violations of consumed properties.
2374
285k
  if (P.enableConsumedAnalysis) {
2375
97
    consumed::ConsumedWarningsHandler WarningHandler(S);
2376
97
    consumed::ConsumedAnalyzer Analyzer(WarningHandler);
2377
97
    Analyzer.run(AC);
2378
97
  }
2379
2380
285k
  if (!Diags.isIgnored(diag::warn_uninit_var, D->getBeginLoc()) ||
2381
285k
      !Diags.isIgnored(diag::warn_sometimes_uninit_var, D->getBeginLoc()) ||
2382
285k
      !Diags.isIgnored(diag::warn_maybe_uninit_var, D->getBeginLoc()) ||
2383
285k
      !Diags.isIgnored(diag::warn_uninit_const_reference, D->getBeginLoc())) {
2384
53.0k
    if (CFG *cfg = AC.getCFG()) {
2385
53.0k
      UninitValsDiagReporter reporter(S);
2386
53.0k
      UninitVariablesAnalysisStats stats;
2387
53.0k
      std::memset(&stats, 0, sizeof(UninitVariablesAnalysisStats));
2388
53.0k
      runUninitializedVariablesAnalysis(*cast<DeclContext>(D), *cfg, AC,
2389
53.0k
                                        reporter, stats);
2390
2391
53.0k
      if (S.CollectStats && stats.NumVariablesAnalyzed > 0) {
2392
0
        ++NumUninitAnalysisFunctions;
2393
0
        NumUninitAnalysisVariables += stats.NumVariablesAnalyzed;
2394
0
        NumUninitAnalysisBlockVisits += stats.NumBlockVisits;
2395
0
        MaxUninitAnalysisVariablesPerFunction =
2396
0
            std::max(MaxUninitAnalysisVariablesPerFunction,
2397
0
                     stats.NumVariablesAnalyzed);
2398
0
        MaxUninitAnalysisBlockVisitsPerFunction =
2399
0
            std::max(MaxUninitAnalysisBlockVisitsPerFunction,
2400
0
                     stats.NumBlockVisits);
2401
0
      }
2402
53.0k
    }
2403
53.0k
  }
2404
2405
  // Check for violations of "called once" parameter properties.
2406
285k
  if (S.getLangOpts().ObjC && !S.getLangOpts().CPlusPlus &&
2407
285k
      shouldAnalyzeCalledOnceParameters(Diags, D->getBeginLoc())) {
2408
10.4k
    if (AC.getCFG()) {
2409
10.2k
      CalledOnceCheckReporter Reporter(S, IPData->CalledOnceData);
2410
10.2k
      checkCalledOnceParameters(
2411
10.2k
          AC, Reporter,
2412
10.2k
          shouldAnalyzeCalledOnceConventions(Diags, D->getBeginLoc()));
2413
10.2k
    }
2414
10.4k
  }
2415
2416
285k
  bool FallThroughDiagFull =
2417
285k
      !Diags.isIgnored(diag::warn_unannotated_fallthrough, D->getBeginLoc());
2418
285k
  bool FallThroughDiagPerFunction = !Diags.isIgnored(
2419
285k
      diag::warn_unannotated_fallthrough_per_function, D->getBeginLoc());
2420
285k
  if (FallThroughDiagFull || FallThroughDiagPerFunction ||
2421
285k
      fscope->HasFallthroughStmt) {
2422
122
    DiagnoseSwitchLabelsFallthrough(S, AC, !FallThroughDiagFull);
2423
122
  }
2424
2425
285k
  if (S.getLangOpts().ObjCWeak &&
2426
285k
      !Diags.isIgnored(diag::warn_arc_repeated_use_of_weak, D->getBeginLoc()))
2427
112
    diagnoseRepeatedUseOfWeak(S, fscope, D, AC.getParentMap());
2428
2429
2430
  // Check for infinite self-recursion in functions
2431
285k
  if (!Diags.isIgnored(diag::warn_infinite_recursive_function,
2432
285k
                       D->getBeginLoc())) {
2433
38.8k
    if (const FunctionDecl *FD = dyn_cast<FunctionDecl>(D)) {
2434
38.8k
      checkRecursiveFunction(S, FD, Body, AC);
2435
38.8k
    }
2436
38.8k
  }
2437
2438
  // Check for throw out of non-throwing function.
2439
285k
  if (!Diags.isIgnored(diag::warn_throw_in_noexcept_func, D->getBeginLoc()))
2440
285k
    if (const FunctionDecl *FD = dyn_cast<FunctionDecl>(D))
2441
278k
      if (S.getLangOpts().CPlusPlus && isNoexcept(FD))
2442
7.39k
        checkThrowInNonThrowingFunc(S, FD, AC);
2443
2444
  // If none of the previous checks caused a CFG build, trigger one here
2445
  // for the logical error handler.
2446
285k
  if (LogicalErrorHandler::hasActiveDiagnostics(Diags, D->getBeginLoc())) {
2447
38.8k
    AC.getCFG();
2448
38.8k
  }
2449
2450
  // Collect statistics about the CFG if it was built.
2451
285k
  if (S.CollectStats && AC.isCFGBuilt()) {
2452
3
    ++NumFunctionsAnalyzed;
2453
3
    if (CFG *cfg = AC.getCFG()) {
2454
      // If we successfully built a CFG for this context, record some more
2455
      // detail information about it.
2456
3
      NumCFGBlocks += cfg->getNumBlockIDs();
2457
3
      MaxCFGBlocksPerFunction = std::max(MaxCFGBlocksPerFunction,
2458
3
                                         cfg->getNumBlockIDs());
2459
3
    } else {
2460
0
      ++NumFunctionsWithBadCFGs;
2461
0
    }
2462
3
  }
2463
285k
}
2464
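IssueWarnings above gates every analysis on whether its lead diagnostic is ignored at the declaration's location, then dispatches the individual checkers. Tiny illustrative functions for two of the cheaper checks it drives, the throw-in-noexcept and infinite-recursion checks (hypothetical code, not from this file):

// Hypothetical triggers for two checks dispatched by IssueWarnings.
void throwsButNoexcept() noexcept {
  throw 42;          // checkThrowInNonThrowingFunc -> warn_throw_in_noexcept_func
}

int alwaysRecurses(int n) {
  return alwaysRecurses(n + 1);   // checkRecursiveFunction ->
}                                 //   warn_infinite_recursive_function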
2465
3
void clang::sema::AnalysisBasedWarnings::PrintStats() const {
2466
3
  llvm::errs() << "\n*** Analysis Based Warnings Stats:\n";
2467
2468
3
  unsigned NumCFGsBuilt = NumFunctionsAnalyzed - NumFunctionsWithBadCFGs;
2469
3
  unsigned AvgCFGBlocksPerFunction =
2470
3
      !NumCFGsBuilt ? 0 : NumCFGBlocks/NumCFGsBuilt;
2471
3
  llvm::errs() << NumFunctionsAnalyzed << " functions analyzed ("
2472
3
               << NumFunctionsWithBadCFGs << " w/o CFGs).\n"
2473
3
               << "  " << NumCFGBlocks << " CFG blocks built.\n"
2474
3
               << "  " << AvgCFGBlocksPerFunction
2475
3
               << " average CFG blocks per function.\n"
2476
3
               << "  " << MaxCFGBlocksPerFunction
2477
3
               << " max CFG blocks per function.\n";
2478
2479
3
  unsigned AvgUninitVariablesPerFunction = !NumUninitAnalysisFunctions ? 0
2480
3
      : NumUninitAnalysisVariables/NumUninitAnalysisFunctions;
2481
3
  unsigned AvgUninitBlockVisitsPerFunction = !NumUninitAnalysisFunctions ? 0
2482
3
      : NumUninitAnalysisBlockVisits/NumUninitAnalysisFunctions;
2483
3
  llvm::errs() << NumUninitAnalysisFunctions
2484
3
               << " functions analyzed for uninitialiazed variables\n"
2485
3
               << "  " << NumUninitAnalysisVariables << " variables analyzed.\n"
2486
3
               << "  " << AvgUninitVariablesPerFunction
2487
3
               << " average variables per function.\n"
2488
3
               << "  " << MaxUninitAnalysisVariablesPerFunction
2489
3
               << " max variables per function.\n"
2490
3
               << "  " << NumUninitAnalysisBlockVisits << " block visits.\n"
2491
3
               << "  " << AvgUninitBlockVisitsPerFunction
2492
3
               << " average block visits per function.\n"
2493
3
               << "  " << MaxUninitAnalysisBlockVisitsPerFunction
2494
3
               << " max block visits per function.\n";
2495
3
}