Coverage Report

Created: 2020-11-24 06:42

/Users/buildslave/jenkins/workspace/coverage/llvm-project/clang/lib/Sema/AnalysisBasedWarnings.cpp
Line
Count
Source (jump to first uncovered line)
1
//=- AnalysisBasedWarnings.cpp - Sema warnings based on libAnalysis -*- C++ -*-=//
2
//
3
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
4
// See https://llvm.org/LICENSE.txt for license information.
5
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
6
//
7
//===----------------------------------------------------------------------===//
8
//
9
// This file defines analysis_warnings::[Policy,Executor].
10
// Together they are used by Sema to issue warnings based on inexpensive
11
// static analysis algorithms in libAnalysis.
12
//
13
//===----------------------------------------------------------------------===//
14
15
#include "clang/Sema/AnalysisBasedWarnings.h"
16
#include "clang/AST/DeclCXX.h"
17
#include "clang/AST/DeclObjC.h"
18
#include "clang/AST/EvaluatedExprVisitor.h"
19
#include "clang/AST/ExprCXX.h"
20
#include "clang/AST/ExprObjC.h"
21
#include "clang/AST/ParentMap.h"
22
#include "clang/AST/RecursiveASTVisitor.h"
23
#include "clang/AST/StmtCXX.h"
24
#include "clang/AST/StmtObjC.h"
25
#include "clang/AST/StmtVisitor.h"
26
#include "clang/Analysis/Analyses/CFGReachabilityAnalysis.h"
27
#include "clang/Analysis/Analyses/Consumed.h"
28
#include "clang/Analysis/Analyses/ReachableCode.h"
29
#include "clang/Analysis/Analyses/ThreadSafety.h"
30
#include "clang/Analysis/Analyses/UninitializedValues.h"
31
#include "clang/Analysis/AnalysisDeclContext.h"
32
#include "clang/Analysis/CFG.h"
33
#include "clang/Analysis/CFGStmtMap.h"
34
#include "clang/Basic/SourceLocation.h"
35
#include "clang/Basic/SourceManager.h"
36
#include "clang/Lex/Preprocessor.h"
37
#include "clang/Sema/ScopeInfo.h"
38
#include "clang/Sema/SemaInternal.h"
39
#include "llvm/ADT/BitVector.h"
40
#include "llvm/ADT/MapVector.h"
41
#include "llvm/ADT/SmallString.h"
42
#include "llvm/ADT/SmallVector.h"
43
#include "llvm/ADT/StringRef.h"
44
#include "llvm/Support/Casting.h"
45
#include <algorithm>
46
#include <deque>
47
#include <iterator>
48
49
using namespace clang;
50
51
//===----------------------------------------------------------------------===//
52
// Unreachable code analysis.
53
//===----------------------------------------------------------------------===//
54
55
namespace {
56
  class UnreachableCodeHandler : public reachable_code::Callback {
57
    Sema &S;
58
    SourceRange PreviousSilenceableCondVal;
59
60
  public:
61
157
    UnreachableCodeHandler(Sema &s) : S(s) {}
62
63
    void HandleUnreachable(reachable_code::UnreachableKind UK,
64
                           SourceLocation L,
65
                           SourceRange SilenceableCondVal,
66
                           SourceRange R1,
67
170
                           SourceRange R2) override {
68
      // Avoid reporting multiple unreachable code diagnostics that are
69
      // triggered by the same conditional value.
70
170
      if (PreviousSilenceableCondVal.isValid() &&
71
25
          SilenceableCondVal.isValid() &&
72
25
          PreviousSilenceableCondVal == SilenceableCondVal)
73
2
        return;
74
168
      PreviousSilenceableCondVal = SilenceableCondVal;
75
76
168
      unsigned diag = diag::warn_unreachable;
77
168
      switch (UK) {
78
16
        case reachable_code::UK_Break:
79
16
          diag = diag::warn_unreachable_break;
80
16
          break;
81
32
        case reachable_code::UK_Return:
82
32
          diag = diag::warn_unreachable_return;
83
32
          break;
84
3
        case reachable_code::UK_Loop_Increment:
85
3
          diag = diag::warn_unreachable_loop_increment;
86
3
          break;
87
117
        case reachable_code::UK_Other:
88
117
          break;
89
168
      }
90
91
168
      S.Diag(L, diag) << R1 << R2;
92
93
168
      SourceLocation Open = SilenceableCondVal.getBegin();
94
168
      if (Open.isValid()) {
95
43
        SourceLocation Close = SilenceableCondVal.getEnd();
96
43
        Close = S.getLocForEndOfToken(Close);
97
43
        if (Close.isValid()) {
98
43
          S.Diag(Open, diag::note_unreachable_silence)
99
43
            << FixItHint::CreateInsertion(Open, "/* DISABLES CODE */ (")
100
43
            << FixItHint::CreateInsertion(Close, ")");
101
43
        }
102
43
      }
103
168
    }
104
  };
105
} // anonymous namespace
106
107
/// CheckUnreachable - Check for unreachable code.
108
159
static void CheckUnreachable(Sema &S, AnalysisDeclContext &AC) {
109
  // As a heuristic prune all diagnostics not in the main file.  Currently
110
  // the majority of warnings in headers are false positives.  These
111
  // are largely caused by configuration state, e.g. preprocessor
112
  // defined code, etc.
113
  //
114
  // Note that this is also a performance optimization.  Analyzing
115
  // headers many times can be expensive.
116
159
  if (!S.getSourceManager().isInMainFile(AC.getDecl()->getBeginLoc()))
117
2
    return;
118
119
157
  UnreachableCodeHandler UC(S);
120
157
  reachable_code::FindUnreachableCode(AC, S.getPreprocessor(), UC);
121
157
}
122
123
namespace {
124
/// Warn on logical operator errors in CFGBuilder
125
class LogicalErrorHandler : public CFGCallback {
126
  Sema &S;
127
128
public:
129
26.7k
  LogicalErrorHandler(Sema &S) : CFGCallback(), S(S) {}
130
131
1.22k
  static bool HasMacroID(const Expr *E) {
132
1.22k
    if (E->getExprLoc().isMacroID())
133
6
      return true;
134
135
    // Recurse to children.
136
1.21k
    for (const Stmt *SubStmt : E->children())
137
1.07k
      if (const Expr *SubExpr = dyn_cast_or_null<Expr>(SubStmt))
138
1.07k
        if (HasMacroID(SubExpr))
139
16
          return true;
140
141
1.19k
    return false;
142
1.21k
  }
143
144
104
  void compareAlwaysTrue(const BinaryOperator *B, bool isAlwaysTrue) override {
145
104
    if (HasMacroID(B))
146
2
      return;
147
148
102
    SourceRange DiagRange = B->getSourceRange();
149
102
    S.Diag(B->getExprLoc(), diag::warn_tautological_overlap_comparison)
150
102
        << DiagRange << isAlwaysTrue;
151
102
  }
152
153
  void compareBitwiseEquality(const BinaryOperator *B,
154
28
                              bool isAlwaysTrue) override {
155
28
    if (HasMacroID(B))
156
4
      return;
157
158
24
    SourceRange DiagRange = B->getSourceRange();
159
24
    S.Diag(B->getExprLoc(), diag::warn_comparison_bitwise_always)
160
24
        << DiagRange << isAlwaysTrue;
161
24
  }
162
163
16
  void compareBitwiseOr(const BinaryOperator *B) override {
164
16
    if (HasMacroID(B))
165
0
      return;
166
167
16
    SourceRange DiagRange = B->getSourceRange();
168
16
    S.Diag(B->getExprLoc(), diag::warn_comparison_bitwise_or) << DiagRange;
169
16
  }
170
171
  static bool hasActiveDiagnostics(DiagnosticsEngine &Diags,
172
418k
                                   SourceLocation Loc) {
173
418k
    return !Diags.isIgnored(diag::warn_tautological_overlap_comparison, Loc) ||
174
364k
           !Diags.isIgnored(diag::warn_comparison_bitwise_or, Loc);
175
418k
  }
176
};
177
} // anonymous namespace
178
179
//===----------------------------------------------------------------------===//
180
// Check for infinite self-recursion in functions
181
//===----------------------------------------------------------------------===//
182
183
// Returns true if the function is called anywhere within the CFGBlock.
184
// For member functions, the additional condition of being call from the
185
// this pointer is required.
186
28.9k
static bool hasRecursiveCallInPath(const FunctionDecl *FD, CFGBlock &Block) {
187
  // Process all the Stmt's in this block to find any calls to FD.
188
342k
  for (const auto &B : Block) {
189
342k
    if (B.getKind() != CFGElement::Statement)
190
742
      continue;
191
192
342k
    const CallExpr *CE = dyn_cast<CallExpr>(B.getAs<CFGStmt>()->getStmt());
193
342k
    if (!CE || 
!CE->getCalleeDecl()33.3k
||
194
33.3k
        CE->getCalleeDecl()->getCanonicalDecl() != FD)
195
342k
      continue;
196
197
    // Skip function calls which are qualified with a templated class.
198
17
    if (const DeclRefExpr *DRE =
199
14
            dyn_cast<DeclRefExpr>(CE->getCallee()->IgnoreParenImpCasts())) {
200
14
      if (NestedNameSpecifier *NNS = DRE->getQualifier()) {
201
1
        if (NNS->getKind() == NestedNameSpecifier::TypeSpec &&
202
1
            isa<TemplateSpecializationType>(NNS->getAsType())) {
203
1
          continue;
204
1
        }
205
16
      }
206
14
    }
207
208
16
    const CXXMemberCallExpr *MCE = dyn_cast<CXXMemberCallExpr>(CE);
209
16
    if (!MCE || 
isa<CXXThisExpr>(MCE->getImplicitObjectArgument())3
||
210
1
        !MCE->getMethodDecl()->isVirtual())
211
16
      return true;
212
16
  }
213
28.9k
  return false;
214
28.9k
}
215
216
// Returns true if every path from the entry block passes through a call to FD.
217
26.7k
static bool checkForRecursiveFunctionCall(const FunctionDecl *FD, CFG *cfg) {
218
26.7k
  llvm::SmallPtrSet<CFGBlock *, 16> Visited;
219
26.7k
  llvm::SmallVector<CFGBlock *, 16> WorkList;
220
  // Keep track of whether we found at least one recursive path.
221
26.7k
  bool foundRecursion = false;
222
223
26.7k
  const unsigned ExitID = cfg->getExit().getBlockID();
224
225
  // Seed the work list with the entry block.
226
26.7k
  WorkList.push_back(&cfg->getEntry());
227
228
54.9k
  while (!WorkList.empty()) {
229
54.9k
    CFGBlock *Block = WorkList.pop_back_val();
230
231
84.3k
    for (auto I = Block->succ_begin(), E = Block->succ_end(); I != E; 
++I29.4k
) {
232
56.1k
      if (CFGBlock *SuccBlock = *I) {
233
55.6k
        if (!Visited.insert(SuccBlock).second)
234
55
          continue;
235
236
        // Found a path to the exit node without a recursive call.
237
55.6k
        if (ExitID == SuccBlock->getBlockID())
238
26.7k
          return false;
239
240
        // If the successor block contains a recursive call, end analysis there.
241
28.9k
        if (hasRecursiveCallInPath(FD, *SuccBlock)) {
242
16
          foundRecursion = true;
243
16
          continue;
244
16
        }
245
246
28.9k
        WorkList.push_back(SuccBlock);
247
28.9k
      }
248
56.1k
    }
249
54.9k
  }
250
15
  return foundRecursion;
251
26.7k
}
252
253
static void checkRecursiveFunction(Sema &S, const FunctionDecl *FD,
254
26.7k
                                   const Stmt *Body, AnalysisDeclContext &AC) {
255
26.7k
  FD = FD->getCanonicalDecl();
256
257
  // Only run on non-templated functions and non-templated members of
258
  // templated classes.
259
26.7k
  if (FD->getTemplatedKind() != FunctionDecl::TK_NonTemplate &&
260
48
      FD->getTemplatedKind() != FunctionDecl::TK_MemberSpecialization)
261
24
    return;
262
263
26.7k
  CFG *cfg = AC.getCFG();
264
26.7k
  if (!cfg) 
return0
;
265
266
  // If the exit block is unreachable, skip processing the function.
267
26.7k
  if (cfg->getExit().pred_empty())
268
5
    return;
269
270
  // Emit diagnostic if a recursive function call is detected for all paths.
271
26.7k
  if (checkForRecursiveFunctionCall(FD, cfg))
272
13
    S.Diag(Body->getBeginLoc(), diag::warn_infinite_recursive_function);
273
26.7k
}
274
275
//===----------------------------------------------------------------------===//
276
// Check for throw in a non-throwing function.
277
//===----------------------------------------------------------------------===//
278
279
/// Determine whether an exception thrown by E, unwinding from ThrowBlock,
280
/// can reach ExitBlock.
281
static bool throwEscapes(Sema &S, const CXXThrowExpr *E, CFGBlock &ThrowBlock,
282
88
                         CFG *Body) {
283
88
  SmallVector<CFGBlock *, 16> Stack;
284
88
  llvm::BitVector Queued(Body->getNumBlockIDs());
285
286
88
  Stack.push_back(&ThrowBlock);
287
88
  Queued[ThrowBlock.getBlockID()] = true;
288
289
180
  while (!Stack.empty()) {
290
145
    CFGBlock &UnwindBlock = *Stack.back();
291
145
    Stack.pop_back();
292
293
167
    for (auto &Succ : UnwindBlock.succs()) {
294
167
      if (!Succ.isReachable() || Queued[Succ->getBlockID()])
295
0
        continue;
296
297
167
      if (Succ->getBlockID() == Body->getExit().getBlockID())
298
53
        return true;
299
300
114
      if (auto *Catch =
301
57
              dyn_cast_or_null<CXXCatchStmt>(Succ->getLabel())) {
302
57
        QualType Caught = Catch->getCaughtType();
303
57
        if (Caught.isNull() || // catch (...) catches everything
304
53
            !E->getSubExpr() || // throw; is considered cuaght by any handler
305
50
            S.handlerCanCatch(Caught, E->getSubExpr()->getType()))
306
          // Exception doesn't escape via this path.
307
35
          break;
308
57
      } else {
309
57
        Stack.push_back(Succ);
310
57
        Queued[Succ->getBlockID()] = true;
311
57
      }
312
114
    }
313
145
  }
314
315
35
  return false;
316
88
}
317
318
static void visitReachableThrows(
319
    CFG *BodyCFG,
320
7.33k
    llvm::function_ref<void(const CXXThrowExpr *, CFGBlock &)> Visit) {
321
7.33k
  llvm::BitVector Reachable(BodyCFG->getNumBlockIDs());
322
7.33k
  clang::reachable_code::ScanReachableFromBlock(&BodyCFG->getEntry(), Reachable);
323
24.1k
  for (CFGBlock *B : *BodyCFG) {
324
24.1k
    if (!Reachable[B->getBlockID()])
325
36
      continue;
326
51.3k
    
for (CFGElement &E : *B)24.0k
{
327
51.3k
      Optional<CFGStmt> S = E.getAs<CFGStmt>();
328
51.3k
      if (!S)
329
531
        continue;
330
50.8k
      if (auto *Throw = dyn_cast<CXXThrowExpr>(S->getStmt()))
331
88
        Visit(Throw, *B);
332
50.8k
    }
333
24.0k
  }
334
7.33k
}
335
336
static void EmitDiagForCXXThrowInNonThrowingFunc(Sema &S, SourceLocation OpLoc,
337
53
                                                 const FunctionDecl *FD) {
338
53
  if (!S.getSourceManager().isInSystemHeader(OpLoc) &&
339
53
      FD->getTypeSourceInfo()) {
340
53
    S.Diag(OpLoc, diag::warn_throw_in_noexcept_func) << FD;
341
53
    if (S.getLangOpts().CPlusPlus11 &&
342
53
        (isa<CXXDestructorDecl>(FD) ||
343
42
         FD->getDeclName().getCXXOverloadedOperator() == OO_Delete ||
344
41
         FD->getDeclName().getCXXOverloadedOperator() == OO_Array_Delete)) {
345
12
      if (const auto *Ty = FD->getTypeSourceInfo()->getType()->
346
12
                                         getAs<FunctionProtoType>())
347
12
        S.Diag(FD->getLocation(), diag::note_throw_in_dtor)
348
12
            << !isa<CXXDestructorDecl>(FD) << !Ty->hasExceptionSpec()
349
12
            << FD->getExceptionSpecSourceRange();
350
12
    } else
351
41
      S.Diag(FD->getLocation(), diag::note_throw_in_function)
352
41
          << FD->getExceptionSpecSourceRange();
353
53
  }
354
53
}
355
356
static void checkThrowInNonThrowingFunc(Sema &S, const FunctionDecl *FD,
357
7.33k
                                        AnalysisDeclContext &AC) {
358
7.33k
  CFG *BodyCFG = AC.getCFG();
359
7.33k
  if (!BodyCFG)
360
0
    return;
361
7.33k
  if (BodyCFG->getExit().pred_empty())
362
0
    return;
363
7.33k
  visitReachableThrows(BodyCFG, [&](const CXXThrowExpr *Throw, CFGBlock &Block) {
364
88
    if (throwEscapes(S, Throw, Block, BodyCFG))
365
53
      EmitDiagForCXXThrowInNonThrowingFunc(S, Throw->getThrowLoc(), FD);
366
88
  });
367
7.33k
}
368
369
126k
static bool isNoexcept(const FunctionDecl *FD) {
370
126k
  const auto *FPT = FD->getType()->castAs<FunctionProtoType>();
371
126k
  if (FPT->isNothrow() || 
FD->hasAttr<NoThrowAttr>()119k
)
372
7.33k
    return true;
373
119k
  return false;
374
119k
}
375
376
//===----------------------------------------------------------------------===//
377
// Check for missing return value.
378
//===----------------------------------------------------------------------===//
379
380
enum ControlFlowKind {
381
  UnknownFallThrough,
382
  NeverFallThrough,
383
  MaybeFallThrough,
384
  AlwaysFallThrough,
385
  NeverFallThroughOrReturn
386
};
387
388
/// CheckFallThrough - Check that we don't fall off the end of a
389
/// Statement that should return a value.
390
///
391
/// \returns AlwaysFallThrough iff we always fall off the end of the statement,
392
/// MaybeFallThrough iff we might or might not fall off the end,
393
/// NeverFallThroughOrReturn iff we never fall off the end of the statement or
394
/// return.  We assume NeverFallThrough iff we never fall off the end of the
395
/// statement but we may return.  We assume that functions not marked noreturn
396
/// will return.
397
91.1k
static ControlFlowKind CheckFallThrough(AnalysisDeclContext &AC) {
398
91.1k
  CFG *cfg = AC.getCFG();
399
91.1k
  if (!cfg) 
return UnknownFallThrough39
;
400
401
  // The CFG leaves in dead things, and we don't want the dead code paths to
402
  // confuse us, so we mark all live things first.
403
91.1k
  llvm::BitVector live(cfg->getNumBlockIDs());
404
91.1k
  unsigned count = reachable_code::ScanReachableFromBlock(&cfg->getEntry(),
405
91.1k
                                                          live);
406
407
91.1k
  bool AddEHEdges = AC.getAddEHEdges();
408
91.1k
  if (!AddEHEdges && count != cfg->getNumBlockIDs())
409
    // When there are things remaining dead, and we didn't add EH edges
410
    // from CallExprs to the catch clauses, we have to go back and
411
    // mark them as live.
412
6.49k
    
for (const auto *B : *cfg)870
{
413
6.49k
      if (!live[B->getBlockID()]) {
414
1.50k
        if (B->pred_begin() == B->pred_end()) {
415
885
          const Stmt *Term = B->getTerminatorStmt();
416
885
          if (Term && 
isa<CXXTryStmt>(Term)145
)
417
            // When not adding EH edges from calls, catch clauses
418
            // can otherwise seem dead.  Avoid noting them as dead.
419
72
            count += reachable_code::ScanReachableFromBlock(B, live);
420
885
          continue;
421
885
        }
422
1.50k
      }
423
6.49k
    }
424
425
  // Now we know what is live, we check the live precessors of the exit block
426
  // and look for fall through paths, being careful to ignore normal returns,
427
  // and exceptional paths.
428
91.1k
  bool HasLiveReturn = false;
429
91.1k
  bool HasFakeEdge = false;
430
91.1k
  bool HasPlainEdge = false;
431
91.1k
  bool HasAbnormalEdge = false;
432
433
  // Ignore default cases that aren't likely to be reachable because all
434
  // enums in a switch(X) have explicit case statements.
435
91.1k
  CFGBlock::FilterOptions FO;
436
91.1k
  FO.IgnoreDefaultsWithCoveredEnums = 1;
437
438
91.1k
  for (CFGBlock::filtered_pred_iterator I =
439
91.1k
           cfg->getExit().filtered_pred_start_end(FO);
440
185k
       I.hasMore(); 
++I94.6k
) {
441
94.6k
    const CFGBlock &B = **I;
442
94.6k
    if (!live[B.getBlockID()])
443
659
      continue;
444
445
    // Skip blocks which contain an element marked as no-return. They don't
446
    // represent actually viable edges into the exit block, so mark them as
447
    // abnormal.
448
93.9k
    if (B.hasNoReturnElement()) {
449
341
      HasAbnormalEdge = true;
450
341
      continue;
451
341
    }
452
453
    // Destructors can appear after the 'return' in the CFG.  This is
454
    // normal.  We need to look pass the destructors for the return
455
    // statement (if it exists).
456
93.6k
    CFGBlock::const_reverse_iterator ri = B.rbegin(), re = B.rend();
457
458
95.2k
    for ( ; ri != re ; 
++ri1.64k
)
459
94.5k
      if (ri->getAs<CFGStmt>())
460
92.9k
        break;
461
462
    // No more CFGElements in the block?
463
93.6k
    if (ri == re) {
464
713
      const Stmt *Term = B.getTerminatorStmt();
465
713
      if (Term && 
isa<CXXTryStmt>(Term)59
) {
466
55
        HasAbnormalEdge = true;
467
55
        continue;
468
55
      }
469
      // A labeled empty statement, or the entry block...
470
658
      HasPlainEdge = true;
471
658
      continue;
472
658
    }
473
474
92.9k
    CFGStmt CS = ri->castAs<CFGStmt>();
475
92.9k
    const Stmt *S = CS.getStmt();
476
92.9k
    if (isa<ReturnStmt>(S) || 
isa<CoreturnStmt>(S)330
) {
477
92.6k
      HasLiveReturn = true;
478
92.6k
      continue;
479
92.6k
    }
480
314
    if (isa<ObjCAtThrowStmt>(S)) {
481
5
      HasFakeEdge = true;
482
5
      continue;
483
5
    }
484
309
    if (isa<CXXThrowExpr>(S)) {
485
49
      HasFakeEdge = true;
486
49
      continue;
487
49
    }
488
260
    if (isa<MSAsmStmt>(S)) {
489
      // TODO: Verify this is correct.
490
13
      HasFakeEdge = true;
491
13
      HasLiveReturn = true;
492
13
      continue;
493
13
    }
494
247
    if (isa<CXXTryStmt>(S)) {
495
0
      HasAbnormalEdge = true;
496
0
      continue;
497
0
    }
498
247
    if (std::find(B.succ_begin(), B.succ_end(), &cfg->getExit())
499
0
        == B.succ_end()) {
500
0
      HasAbnormalEdge = true;
501
0
      continue;
502
0
    }
503
504
247
    HasPlainEdge = true;
505
247
  }
506
91.1k
  if (!HasPlainEdge) {
507
90.2k
    if (HasLiveReturn)
508
89.9k
      return NeverFallThrough;
509
283
    return NeverFallThroughOrReturn;
510
283
  }
511
895
  if (HasAbnormalEdge || 
HasFakeEdge877
||
HasLiveReturn877
)
512
39
    return MaybeFallThrough;
513
  // This says AlwaysFallThrough for calls to functions that are not marked
514
  // noreturn, that don't return.  If people would like this warning to be more
515
  // accurate, such functions should be marked as noreturn.
516
856
  return AlwaysFallThrough;
517
856
}
518
519
namespace {
520
521
struct CheckFallThroughDiagnostics {
522
  unsigned diag_MaybeFallThrough_HasNoReturn;
523
  unsigned diag_MaybeFallThrough_ReturnsNonVoid;
524
  unsigned diag_AlwaysFallThrough_HasNoReturn;
525
  unsigned diag_AlwaysFallThrough_ReturnsNonVoid;
526
  unsigned diag_NeverFallThroughOrReturn;
527
  enum { Function, Block, Lambda, Coroutine } funMode;
528
  SourceLocation FuncLoc;
529
530
197k
  static CheckFallThroughDiagnostics MakeForFunction(const Decl *Func) {
531
197k
    CheckFallThroughDiagnostics D;
532
197k
    D.FuncLoc = Func->getLocation();
533
197k
    D.diag_MaybeFallThrough_HasNoReturn =
534
197k
      diag::warn_falloff_noreturn_function;
535
197k
    D.diag_MaybeFallThrough_ReturnsNonVoid =
536
197k
      diag::warn_maybe_falloff_nonvoid_function;
537
197k
    D.diag_AlwaysFallThrough_HasNoReturn =
538
197k
      diag::warn_falloff_noreturn_function;
539
197k
    D.diag_AlwaysFallThrough_ReturnsNonVoid =
540
197k
      diag::warn_falloff_nonvoid_function;
541
542
    // Don't suggest that virtual functions be marked "noreturn", since they
543
    // might be overridden by non-noreturn functions.
544
197k
    bool isVirtualMethod = false;
545
197k
    if (const CXXMethodDecl *Method = dyn_cast<CXXMethodDecl>(Func))
546
45.8k
      isVirtualMethod = Method->isVirtual();
547
548
    // Don't suggest that template instantiations be marked "noreturn"
549
197k
    bool isTemplateInstantiation = false;
550
197k
    if (const FunctionDecl *Function = dyn_cast<FunctionDecl>(Func))
551
192k
      isTemplateInstantiation = Function->isTemplateInstantiation();
552
553
197k
    if (!isVirtualMethod && 
!isTemplateInstantiation195k
)
554
179k
      D.diag_NeverFallThroughOrReturn =
555
179k
        diag::warn_suggest_noreturn_function;
556
17.6k
    else
557
17.6k
      D.diag_NeverFallThroughOrReturn = 0;
558
559
197k
    D.funMode = Function;
560
197k
    return D;
561
197k
  }
562
563
82
  static CheckFallThroughDiagnostics MakeForCoroutine(const Decl *Func) {
564
82
    CheckFallThroughDiagnostics D;
565
82
    D.FuncLoc = Func->getLocation();
566
82
    D.diag_MaybeFallThrough_HasNoReturn = 0;
567
82
    D.diag_MaybeFallThrough_ReturnsNonVoid =
568
82
        diag::warn_maybe_falloff_nonvoid_coroutine;
569
82
    D.diag_AlwaysFallThrough_HasNoReturn = 0;
570
82
    D.diag_AlwaysFallThrough_ReturnsNonVoid =
571
82
        diag::warn_falloff_nonvoid_coroutine;
572
82
    D.funMode = Coroutine;
573
82
    return D;
574
82
  }
575
576
1.99k
  static CheckFallThroughDiagnostics MakeForBlock() {
577
1.99k
    CheckFallThroughDiagnostics D;
578
1.99k
    D.diag_MaybeFallThrough_HasNoReturn =
579
1.99k
      diag::err_noreturn_block_has_return_expr;
580
1.99k
    D.diag_MaybeFallThrough_ReturnsNonVoid =
581
1.99k
      diag::err_maybe_falloff_nonvoid_block;
582
1.99k
    D.diag_AlwaysFallThrough_HasNoReturn =
583
1.99k
      diag::err_noreturn_block_has_return_expr;
584
1.99k
    D.diag_AlwaysFallThrough_ReturnsNonVoid =
585
1.99k
      diag::err_falloff_nonvoid_block;
586
1.99k
    D.diag_NeverFallThroughOrReturn = 0;
587
1.99k
    D.funMode = Block;
588
1.99k
    return D;
589
1.99k
  }
590
591
2.98k
  static CheckFallThroughDiagnostics MakeForLambda() {
592
2.98k
    CheckFallThroughDiagnostics D;
593
2.98k
    D.diag_MaybeFallThrough_HasNoReturn =
594
2.98k
      diag::err_noreturn_lambda_has_return_expr;
595
2.98k
    D.diag_MaybeFallThrough_ReturnsNonVoid =
596
2.98k
      diag::warn_maybe_falloff_nonvoid_lambda;
597
2.98k
    D.diag_AlwaysFallThrough_HasNoReturn =
598
2.98k
      diag::err_noreturn_lambda_has_return_expr;
599
2.98k
    D.diag_AlwaysFallThrough_ReturnsNonVoid =
600
2.98k
      diag::warn_falloff_nonvoid_lambda;
601
2.98k
    D.diag_NeverFallThroughOrReturn = 0;
602
2.98k
    D.funMode = Lambda;
603
2.98k
    return D;
604
2.98k
  }
605
606
  bool checkDiagnostics(DiagnosticsEngine &D, bool ReturnsVoid,
607
202k
                        bool HasNoReturn) const {
608
202k
    if (funMode == Function) {
609
197k
      return (ReturnsVoid ||
610
90.0k
              D.isIgnored(diag::warn_maybe_falloff_nonvoid_function,
611
90.0k
                          FuncLoc)) &&
612
107k
             (!HasNoReturn ||
613
163
              D.isIgnored(diag::warn_noreturn_function_has_return_expr,
614
163
                          FuncLoc)) &&
615
107k
             (!ReturnsVoid ||
616
107k
              D.isIgnored(diag::warn_suggest_noreturn_block, FuncLoc));
617
197k
    }
618
5.06k
    if (funMode == Coroutine) {
619
82
      return (ReturnsVoid ||
620
18
              D.isIgnored(diag::warn_maybe_falloff_nonvoid_function, FuncLoc) ||
621
18
              D.isIgnored(diag::warn_maybe_falloff_nonvoid_coroutine,
622
18
                          FuncLoc)) &&
623
64
             (!HasNoReturn);
624
82
    }
625
    // For blocks / lambdas.
626
4.98k
    return ReturnsVoid && 
!HasNoReturn4.00k
;
627
4.98k
  }
628
};
629
630
} // anonymous namespace
631
632
/// CheckFallThroughForBody - Check that we don't fall off the end of a
633
/// function that should return a value.  Check that we don't fall off the end
634
/// of a noreturn function.  We assume that functions and blocks not marked
635
/// noreturn will return.
636
static void CheckFallThroughForBody(Sema &S, const Decl *D, const Stmt *Body,
637
                                    QualType BlockType,
638
                                    const CheckFallThroughDiagnostics &CD,
639
                                    AnalysisDeclContext &AC,
640
202k
                                    sema::FunctionScopeInfo *FSI) {
641
642
202k
  bool ReturnsVoid = false;
643
202k
  bool HasNoReturn = false;
644
202k
  bool IsCoroutine = FSI->isCoroutine();
645
646
202k
  if (const auto *FD = dyn_cast<FunctionDecl>(D)) {
647
195k
    if (const auto *CBody = dyn_cast<CoroutineBodyStmt>(Body))
648
83
      ReturnsVoid = CBody->getFallthroughHandler() != nullptr;
649
195k
    else
650
195k
      ReturnsVoid = FD->getReturnType()->isVoidType();
651
195k
    HasNoReturn = FD->isNoReturn();
652
195k
  }
653
7.23k
  else if (const auto *MD = dyn_cast<ObjCMethodDecl>(D)) {
654
5.23k
    ReturnsVoid = MD->getReturnType()->isVoidType();
655
5.23k
    HasNoReturn = MD->hasAttr<NoReturnAttr>();
656
5.23k
  }
657
1.99k
  else if (isa<BlockDecl>(D)) {
658
1.99k
    if (const FunctionType *FT =
659
1.99k
          BlockType->getPointeeType()->getAs<FunctionType>()) {
660
1.99k
      if (FT->getReturnType()->isVoidType())
661
1.61k
        ReturnsVoid = true;
662
1.99k
      if (FT->getNoReturnAttr())
663
1
        HasNoReturn = true;
664
1.99k
    }
665
1.99k
  }
666
667
202k
  DiagnosticsEngine &Diags = S.getDiagnostics();
668
669
  // Short circuit for compilation speed.
670
202k
  if (CD.checkDiagnostics(Diags, ReturnsVoid, HasNoReturn))
671
111k
      return;
672
91.1k
  SourceLocation LBrace = Body->getBeginLoc(), RBrace = Body->getEndLoc();
673
847
  auto EmitDiag = [&](SourceLocation Loc, unsigned DiagID) {
674
847
    if (IsCoroutine)
675
6
      S.Diag(Loc, DiagID) << FSI->CoroutinePromise->getType();
676
841
    else
677
841
      S.Diag(Loc, DiagID);
678
847
  };
679
680
  // cpu_dispatch functions permit empty function bodies for ICC compatibility.
681
91.1k
  if (D->getAsFunction() && 
D->getAsFunction()->isCPUDispatchMultiVersion()87.9k
)
682
0
    return;
683
684
  // Either in a function body compound statement, or a function-try-block.
685
91.1k
  switch (CheckFallThrough(AC)) {
686
39
    case UnknownFallThrough:
687
39
      break;
688
689
39
    case MaybeFallThrough:
690
39
      if (HasNoReturn)
691
0
        EmitDiag(RBrace, CD.diag_MaybeFallThrough_HasNoReturn);
692
39
      else if (!ReturnsVoid)
693
38
        EmitDiag(RBrace, CD.diag_MaybeFallThrough_ReturnsNonVoid);
694
39
      break;
695
856
    case AlwaysFallThrough:
696
856
      if (HasNoReturn)
697
101
        EmitDiag(RBrace, CD.diag_AlwaysFallThrough_HasNoReturn);
698
755
      else if (!ReturnsVoid)
699
708
        EmitDiag(RBrace, CD.diag_AlwaysFallThrough_ReturnsNonVoid);
700
856
      break;
701
283
    case NeverFallThroughOrReturn:
702
283
      if (ReturnsVoid && 
!HasNoReturn70
&&
CD.diag_NeverFallThroughOrReturn9
) {
703
4
        if (const FunctionDecl *FD = dyn_cast<FunctionDecl>(D)) {
704
3
          S.Diag(LBrace, CD.diag_NeverFallThroughOrReturn) << 0 << FD;
705
1
        } else if (const ObjCMethodDecl *MD = dyn_cast<ObjCMethodDecl>(D)) {
706
1
          S.Diag(LBrace, CD.diag_NeverFallThroughOrReturn) << 1 << MD;
707
0
        } else {
708
0
          S.Diag(LBrace, CD.diag_NeverFallThroughOrReturn);
709
0
        }
710
4
      }
711
283
      break;
712
89.9k
    case NeverFallThrough:
713
89.9k
      break;
714
91.1k
  }
715
91.1k
}
716
717
//===----------------------------------------------------------------------===//
718
// -Wuninitialized
719
//===----------------------------------------------------------------------===//
720
721
namespace {
722
/// ContainsReference - A visitor class to search for references to
723
/// a particular declaration (the needle) within any evaluated component of an
724
/// expression (recursively).
725
class ContainsReference : public ConstEvaluatedExprVisitor<ContainsReference> {
726
  bool FoundReference;
727
  const DeclRefExpr *Needle;
728
729
public:
730
  typedef ConstEvaluatedExprVisitor<ContainsReference> Inherited;
731
732
  ContainsReference(ASTContext &Context, const DeclRefExpr *Needle)
733
69
    : Inherited(Context), FoundReference(false), Needle(Needle) {}
734
735
300
  void VisitExpr(const Expr *E) {
736
    // Stop evaluating if we already have a reference.
737
300
    if (FoundReference)
738
25
      return;
739
740
275
    Inherited::VisitExpr(E);
741
275
  }
742
743
128
  void VisitDeclRefExpr(const DeclRefExpr *E) {
744
128
    if (E == Needle)
745
67
      FoundReference = true;
746
61
    else
747
61
      Inherited::VisitDeclRefExpr(E);
748
128
  }
749
750
69
  bool doesContainReference() const { return FoundReference; }
751
};
752
} // anonymous namespace
753
754
1.02k
static bool SuggestInitializationFixit(Sema &S, const VarDecl *VD) {
755
1.02k
  QualType VariableTy = VD->getType().getCanonicalType();
756
1.02k
  if (VariableTy->isBlockPointerType() &&
757
3
      !VD->hasAttr<BlocksAttr>()) {
758
3
    S.Diag(VD->getLocation(), diag::note_block_var_fixit_add_initialization)
759
3
        << VD->getDeclName()
760
3
        << FixItHint::CreateInsertion(VD->getLocation(), "__block ");
761
3
    return true;
762
3
  }
763
764
  // Don't issue a fixit if there is already an initializer.
765
1.01k
  if (VD->getInit())
766
2
    return false;
767
768
  // Don't suggest a fixit inside macros.
769
1.01k
  if (VD->getEndLoc().isMacroID())
770
2
    return false;
771
772
1.01k
  SourceLocation Loc = S.getLocForEndOfToken(VD->getEndLoc());
773
774
  // Suggest possible initialization (if any).
775
1.01k
  std::string Init = S.getFixItZeroInitializerForType(VariableTy, Loc);
776
1.01k
  if (Init.empty())
777
5
    return false;
778
779
1.00k
  S.Diag(Loc, diag::note_var_fixit_add_initialization) << VD->getDeclName()
780
1.00k
    << FixItHint::CreateInsertion(Loc, Init);
781
1.00k
  return true;
782
1.00k
}
783
784
/// Create a fixit to remove an if-like statement, on the assumption that its
785
/// condition is CondVal.
786
static void CreateIfFixit(Sema &S, const Stmt *If, const Stmt *Then,
787
                          const Stmt *Else, bool CondVal,
788
23
                          FixItHint &Fixit1, FixItHint &Fixit2) {
789
23
  if (CondVal) {
790
    // If condition is always true, remove all but the 'then'.
791
12
    Fixit1 = FixItHint::CreateRemoval(
792
12
        CharSourceRange::getCharRange(If->getBeginLoc(), Then->getBeginLoc()));
793
12
    if (Else) {
794
2
      SourceLocation ElseKwLoc = S.getLocForEndOfToken(Then->getEndLoc());
795
2
      Fixit2 =
796
2
          FixItHint::CreateRemoval(SourceRange(ElseKwLoc, Else->getEndLoc()));
797
2
    }
798
11
  } else {
799
    // If condition is always false, remove all but the 'else'.
800
11
    if (Else)
801
11
      Fixit1 = FixItHint::CreateRemoval(CharSourceRange::getCharRange(
802
11
          If->getBeginLoc(), Else->getBeginLoc()));
803
0
    else
804
0
      Fixit1 = FixItHint::CreateRemoval(If->getSourceRange());
805
11
  }
806
23
}
807
808
/// DiagUninitUse -- Helper function to produce a diagnostic for an
809
/// uninitialized use of a variable.
810
static void DiagUninitUse(Sema &S, const VarDecl *VD, const UninitUse &Use,
811
1.01k
                          bool IsCapturedByBlock) {
812
1.01k
  bool Diagnosed = false;
813
814
1.01k
  switch (Use.getKind()) {
815
923
  case UninitUse::Always:
816
923
    S.Diag(Use.getUser()->getBeginLoc(), diag::warn_uninit_var)
817
923
        << VD->getDeclName() << IsCapturedByBlock
818
923
        << Use.getUser()->getSourceRange();
819
923
    return;
820
821
11
  case UninitUse::AfterDecl:
822
11
  case UninitUse::AfterCall:
823
11
    S.Diag(VD->getLocation(), diag::warn_sometimes_uninit_var)
824
11
      << VD->getDeclName() << IsCapturedByBlock
825
6
      << (Use.getKind() == UninitUse::AfterDecl ? 
45
: 5)
826
11
      << const_cast<DeclContext*>(VD->getLexicalDeclContext())
827
11
      << VD->getSourceRange();
828
11
    S.Diag(Use.getUser()->getBeginLoc(), diag::note_uninit_var_use)
829
11
        << IsCapturedByBlock << Use.getUser()->getSourceRange();
830
11
    return;
831
832
84
  case UninitUse::Maybe:
833
84
  case UninitUse::Sometimes:
834
    // Carry on to report sometimes-uninitialized branches, if possible,
835
    // or a 'may be used uninitialized' diagnostic otherwise.
836
84
    break;
837
84
  }
838
839
  // Diagnose each branch which leads to a sometimes-uninitialized use.
840
84
  for (UninitUse::branch_iterator I = Use.branch_begin(), E = Use.branch_end();
841
139
       I != E; 
++I55
) {
842
55
    assert(Use.getKind() == UninitUse::Sometimes);
843
844
55
    const Expr *User = Use.getUser();
845
55
    const Stmt *Term = I->Terminator;
846
847
    // Information used when building the diagnostic.
848
55
    unsigned DiagKind;
849
55
    StringRef Str;
850
55
    SourceRange Range;
851
852
    // FixIts to suppress the diagnostic by removing the dead condition.
853
    // For all binary terminators, branch 0 is taken if the condition is true,
854
    // and branch 1 is taken if the condition is false.
855
55
    int RemoveDiagKind = -1;
856
55
    const char *FixitStr =
857
47
        S.getLangOpts().CPlusPlus ? (I->Output ? 
"true"20
:
"false"27
)
858
8
                                  : (I->Output ? 
"1"4
:
"0"4
);
859
55
    FixItHint Fixit1, Fixit2;
860
861
55
    switch (Term ? Term->getStmtClass() : 
Stmt::DeclStmtClass0
) {
862
0
    default:
863
      // Don't know how to report this. Just fall back to 'may be used
864
      // uninitialized'. FIXME: Can this happen?
865
0
      continue;
866
867
    // "condition is true / condition is false".
868
19
    case Stmt::IfStmtClass: {
869
19
      const IfStmt *IS = cast<IfStmt>(Term);
870
19
      DiagKind = 0;
871
19
      Str = "if";
872
19
      Range = IS->getCond()->getSourceRange();
873
19
      RemoveDiagKind = 0;
874
19
      CreateIfFixit(S, IS, IS->getThen(), IS->getElse(),
875
19
                    I->Output, Fixit1, Fixit2);
876
19
      break;
877
0
    }
878
4
    case Stmt::ConditionalOperatorClass: {
879
4
      const ConditionalOperator *CO = cast<ConditionalOperator>(Term);
880
4
      DiagKind = 0;
881
4
      Str = "?:";
882
4
      Range = CO->getCond()->getSourceRange();
883
4
      RemoveDiagKind = 0;
884
4
      CreateIfFixit(S, CO, CO->getTrueExpr(), CO->getFalseExpr(),
885
4
                    I->Output, Fixit1, Fixit2);
886
4
      break;
887
0
    }
888
12
    case Stmt::BinaryOperatorClass: {
889
12
      const BinaryOperator *BO = cast<BinaryOperator>(Term);
890
12
      if (!BO->isLogicalOp())
891
0
        continue;
892
12
      DiagKind = 0;
893
12
      Str = BO->getOpcodeStr();
894
12
      Range = BO->getLHS()->getSourceRange();
895
12
      RemoveDiagKind = 0;
896
12
      if ((BO->getOpcode() == BO_LAnd && 
I->Output4
) ||
897
10
          (BO->getOpcode() == BO_LOr && 
!I->Output8
))
898
        // true && y -> y, false || y -> y.
899
8
        Fixit1 = FixItHint::CreateRemoval(
900
8
            SourceRange(BO->getBeginLoc(), BO->getOperatorLoc()));
901
4
      else
902
        // false && y -> false, true || y -> true.
903
4
        Fixit1 = FixItHint::CreateReplacement(BO->getSourceRange(), FixitStr);
904
12
      break;
905
12
    }
906
907
    // "loop is entered / loop is exited".
908
4
    case Stmt::WhileStmtClass:
909
4
      DiagKind = 1;
910
4
      Str = "while";
911
4
      Range = cast<WhileStmt>(Term)->getCond()->getSourceRange();
912
4
      RemoveDiagKind = 1;
913
4
      Fixit1 = FixItHint::CreateReplacement(Range, FixitStr);
914
4
      break;
915
4
    case Stmt::ForStmtClass:
916
4
      DiagKind = 1;
917
4
      Str = "for";
918
4
      Range = cast<ForStmt>(Term)->getCond()->getSourceRange();
919
4
      RemoveDiagKind = 1;
920
4
      if (I->Output)
921
2
        Fixit1 = FixItHint::CreateRemoval(Range);
922
2
      else
923
2
        Fixit1 = FixItHint::CreateReplacement(Range, FixitStr);
924
4
      break;
925
4
    case Stmt::CXXForRangeStmtClass:
926
4
      if (I->Output == 1) {
927
        // The use occurs if a range-based for loop's body never executes.
928
        // That may be impossible, and there's no syntactic fix for this,
929
        // so treat it as a 'may be uninitialized' case.
930
2
        continue;
931
2
      }
932
2
      DiagKind = 1;
933
2
      Str = "for";
934
2
      Range = cast<CXXForRangeStmt>(Term)->getRangeInit()->getSourceRange();
935
2
      break;
936
937
    // "condition is true / loop is exited".
938
4
    case Stmt::DoStmtClass:
939
4
      DiagKind = 2;
940
4
      Str = "do";
941
4
      Range = cast<DoStmt>(Term)->getCond()->getSourceRange();
942
4
      RemoveDiagKind = 1;
943
4
      Fixit1 = FixItHint::CreateReplacement(Range, FixitStr);
944
4
      break;
945
946
    // "switch case is taken".
947
2
    case Stmt::CaseStmtClass:
948
2
      DiagKind = 3;
949
2
      Str = "case";
950
2
      Range = cast<CaseStmt>(Term)->getLHS()->getSourceRange();
951
2
      break;
952
2
    case Stmt::DefaultStmtClass:
953
2
      DiagKind = 3;
954
2
      Str = "default";
955
2
      Range = cast<DefaultStmt>(Term)->getDefaultLoc();
956
2
      break;
957
53
    }
958
959
53
    S.Diag(Range.getBegin(), diag::warn_sometimes_uninit_var)
960
53
      << VD->getDeclName() << IsCapturedByBlock << DiagKind
961
53
      << Str << I->Output << Range;
962
53
    S.Diag(User->getBeginLoc(), diag::note_uninit_var_use)
963
53
        << IsCapturedByBlock << User->getSourceRange();
964
53
    if (RemoveDiagKind != -1)
965
47
      S.Diag(Fixit1.RemoveRange.getBegin(), diag::note_uninit_fixit_remove_cond)
966
47
        << RemoveDiagKind << Str << I->Output << Fixit1 << Fixit2;
967
968
53
    Diagnosed = true;
969
53
  }
970
971
84
  if (!Diagnosed)
972
33
    S.Diag(Use.getUser()->getBeginLoc(), diag::warn_maybe_uninit_var)
973
33
        << VD->getDeclName() << IsCapturedByBlock
974
33
        << Use.getUser()->getSourceRange();
975
84
}
976
977
/// Diagnose uninitialized const reference usages.
978
static bool DiagnoseUninitializedConstRefUse(Sema &S, const VarDecl *VD,
979
14
                                             const UninitUse &Use) {
980
14
  S.Diag(Use.getUser()->getBeginLoc(), diag::warn_uninit_const_reference)
981
14
      << VD->getDeclName() << Use.getUser()->getSourceRange();
982
14
  return true;
983
14
}
984
985
/// DiagnoseUninitializedUse -- Helper function for diagnosing uses of an
986
/// uninitialized variable. This manages the different forms of diagnostic
987
/// emitted for particular types of uses. Returns true if the use was diagnosed
988
/// as a warning. If a particular use is one we omit warnings for, returns
989
/// false.
990
static bool DiagnoseUninitializedUse(Sema &S, const VarDecl *VD,
991
                                     const UninitUse &Use,
992
1.08k
                                     bool alwaysReportSelfInit = false) {
993
1.08k
  if (const DeclRefExpr *DRE = dyn_cast<DeclRefExpr>(Use.getUser())) {
994
    // Inspect the initializer of the variable declaration which is
995
    // being referenced prior to its initialization. We emit
996
    // specialized diagnostics for self-initialization, and we
997
    // specifically avoid warning about self references which take the
998
    // form of:
999
    //
1000
    //   int x = x;
1001
    //
1002
    // This is used to indicate to GCC that 'x' is intentionally left
1003
    // uninitialized. Proven code paths which access 'x' in
1004
    // an uninitialized state after this will still warn.
1005
1.08k
    if (const Expr *Initializer = VD->getInit()) {
1006
69
      if (!alwaysReportSelfInit && 
DRE == Initializer->IgnoreParenImpCasts()66
)
1007
0
        return false;
1008
1009
69
      ContainsReference CR(S.Context, DRE);
1010
69
      CR.Visit(Initializer);
1011
69
      if (CR.doesContainReference()) {
1012
67
        S.Diag(DRE->getBeginLoc(), diag::warn_uninit_self_reference_in_init)
1013
67
            << VD->getDeclName() << VD->getLocation() << DRE->getSourceRange();
1014
67
        return true;
1015
67
      }
1016
1.01k
    }
1017
1018
1.01k
    DiagUninitUse(S, VD, Use, false);
1019
5
  } else {
1020
5
    const BlockExpr *BE = cast<BlockExpr>(Use.getUser());
1021
5
    if (VD->getType()->isBlockPointerType() && 
!VD->hasAttr<BlocksAttr>()3
)
1022
3
      S.Diag(BE->getBeginLoc(),
1023
3
             diag::warn_uninit_byref_blockvar_captured_by_block)
1024
3
          << VD->getDeclName()
1025
3
          << VD->getType().getQualifiers().hasObjCLifetime();
1026
2
    else
1027
2
      DiagUninitUse(S, VD, Use, true);
1028
5
  }
1029
1030
  // Report where the variable was declared when the use wasn't within
1031
  // the initializer of that declaration & we didn't already suggest
1032
  // an initialization fixit.
1033
1.02k
  if (!SuggestInitializationFixit(S, VD))
1034
9
    S.Diag(VD->getBeginLoc(), diag::note_var_declared_here)
1035
9
        << VD->getDeclName();
1036
1037
1.02k
  return true;
1038
1.08k
}
1039
1040
namespace {
1041
  class FallthroughMapper : public RecursiveASTVisitor<FallthroughMapper> {
1042
  public:
1043
    FallthroughMapper(Sema &S)
1044
      : FoundSwitchStatements(false),
1045
120
        S(S) {
1046
120
    }
1047
1048
120
    bool foundSwitchStatements() const { return FoundSwitchStatements; }
1049
1050
44
    void markFallthroughVisited(const AttributedStmt *Stmt) {
1051
44
      bool Found = FallthroughStmts.erase(Stmt);
1052
44
      assert(Found);
1053
44
      (void)Found;
1054
44
    }
1055
1056
    typedef llvm::SmallPtrSet<const AttributedStmt*, 8> AttrStmts;
1057
1058
92
    const AttrStmts &getFallthroughStmts() const {
1059
92
      return FallthroughStmts;
1060
92
    }
1061
1062
76
    void fillReachableBlocks(CFG *Cfg) {
1063
76
      assert(ReachableBlocks.empty() && "ReachableBlocks already filled");
1064
76
      std::deque<const CFGBlock *> BlockQueue;
1065
1066
76
      ReachableBlocks.insert(&Cfg->getEntry());
1067
76
      BlockQueue.push_back(&Cfg->getEntry());
1068
      // Mark all case blocks reachable to avoid problems with switching on
1069
      // constants, covered enums, etc.
1070
      // These blocks can contain fall-through annotations, and we don't want to
1071
      // issue a warn_fallthrough_attr_unreachable for them.
1072
647
      for (const auto *B : *Cfg) {
1073
647
        const Stmt *L = B->getLabel();
1074
647
        if (L && 
isa<SwitchCase>(L)264
&&
ReachableBlocks.insert(B).second261
)
1075
261
          BlockQueue.push_back(B);
1076
647
      }
1077
1078
693
      while (!BlockQueue.empty()) {
1079
617
        const CFGBlock *P = BlockQueue.front();
1080
617
        BlockQueue.pop_front();
1081
617
        for (CFGBlock::const_succ_iterator I = P->succ_begin(),
1082
617
                                           E = P->succ_end();
1083
1.43k
             I != E; 
++I820
) {
1084
820
          if (*I && 
ReachableBlocks.insert(*I).second798
)
1085
280
            BlockQueue.push_back(*I);
1086
820
        }
1087
617
      }
1088
76
    }
1089
1090
    bool checkFallThroughIntoBlock(const CFGBlock &B, int &AnnotatedCnt,
1091
261
                                   bool IsTemplateInstantiation) {
1092
261
      assert(!ReachableBlocks.empty() && "ReachableBlocks empty");
1093
1094
261
      int UnannotatedCnt = 0;
1095
261
      AnnotatedCnt = 0;
1096
1097
261
      std::deque<const CFGBlock*> BlockQueue(B.pred_begin(), B.pred_end());
1098
682
      while (!BlockQueue.empty()) {
1099
421
        const CFGBlock *P = BlockQueue.front();
1100
421
        BlockQueue.pop_front();
1101
421
        if (!P) 
continue7
;
1102
1103
414
        const Stmt *Term = P->getTerminatorStmt();
1104
414
        if (Term && 
isa<SwitchStmt>(Term)281
)
1105
259
          continue; // Switch statement, good.
1106
1107
155
        const SwitchCase *SW = dyn_cast_or_null<SwitchCase>(P->getLabel());
1108
155
        if (SW && 
SW->getSubStmt() == B.getLabel()119
&&
P->begin() == P->end()4
)
1109
4
          continue; // Previous case label has no statements, good.
1110
1111
151
        const LabelStmt *L = dyn_cast_or_null<LabelStmt>(P->getLabel());
1112
151
        if (L && 
L->getSubStmt() == B.getLabel()3
&&
P->begin() == P->end()3
)
1113
3
          continue; // Case label is preceded with a normal label, good.
1114
1115
148
        if (!ReachableBlocks.count(P)) {
1116
17
          for (CFGBlock::const_reverse_iterator ElemIt = P->rbegin(),
1117
17
                                                ElemEnd = P->rend();
1118
43
               ElemIt != ElemEnd; 
++ElemIt26
) {
1119
31
            if (Optional<CFGStmt> CS = ElemIt->getAs<CFGStmt>()) {
1120
29
              if (const AttributedStmt *AS = asFallThroughAttr(CS->getStmt())) {
1121
                // Don't issue a warning for an unreachable fallthrough
1122
                // attribute in template instantiations as it may not be
1123
                // unreachable in all instantiations of the template.
1124
5
                if (!IsTemplateInstantiation)
1125
4
                  S.Diag(AS->getBeginLoc(),
1126
4
                         diag::warn_fallthrough_attr_unreachable);
1127
5
                markFallthroughVisited(AS);
1128
5
                ++AnnotatedCnt;
1129
5
                break;
1130
5
              }
1131
              // Don't care about other unreachable statements.
1132
29
            }
1133
31
          }
1134
          // If there are no unreachable statements, this may be a special
1135
          // case in CFG:
1136
          // case X: {
1137
          //    A a;  // A has a destructor.
1138
          //    break;
1139
          // }
1140
          // // <<<< This place is represented by a 'hanging' CFG block.
1141
          // case Y:
1142
17
          continue;
1143
17
        }
1144
1145
131
        const Stmt *LastStmt = getLastStmt(*P);
1146
131
        if (const AttributedStmt *AS = asFallThroughAttr(LastStmt)) {
1147
39
          markFallthroughVisited(AS);
1148
39
          ++AnnotatedCnt;
1149
39
          continue; // Fallthrough annotation, good.
1150
39
        }
1151
1152
92
        if (!LastStmt) { // This block contains no executable statements.
1153
          // Traverse its predecessors.
1154
2
          std::copy(P->pred_begin(), P->pred_end(),
1155
2
                    std::back_inserter(BlockQueue));
1156
2
          continue;
1157
2
        }
1158
1159
90
        ++UnannotatedCnt;
1160
90
      }
1161
261
      return !!UnannotatedCnt;
1162
261
    }
1163
1164
    // RecursiveASTVisitor setup.
1165
13
    bool shouldWalkTypesOfTypeLocs() const { return false; }
1166
1167
60
    bool VisitAttributedStmt(AttributedStmt *S) {
1168
60
      if (asFallThroughAttr(S))
1169
60
        FallthroughStmts.insert(S);
1170
60
      return true;
1171
60
    }
1172
1173
100
    bool VisitSwitchStmt(SwitchStmt *S) {
1174
100
      FoundSwitchStatements = true;
1175
100
      return true;
1176
100
    }
1177
1178
    // We don't want to traverse local type declarations. We analyze their
1179
    // methods separately.
1180
28
    bool TraverseDecl(Decl *D) { return true; }
1181
1182
    // We analyze lambda bodies separately. Skip them here.
1183
1
    bool TraverseLambdaExpr(LambdaExpr *LE) {
1184
      // Traverse the captures, but not the body.
1185
1
      for (const auto C : zip(LE->captures(), LE->capture_inits()))
1186
0
        TraverseLambdaCapture(LE, &std::get<0>(C), std::get<1>(C));
1187
1
      return true;
1188
1
    }
1189
1190
  private:
1191
1192
220
    static const AttributedStmt *asFallThroughAttr(const Stmt *S) {
1193
220
      if (const AttributedStmt *AS = dyn_cast_or_null<AttributedStmt>(S)) {
1194
104
        if (hasSpecificAttr<FallThroughAttr>(AS->getAttrs()))
1195
104
          return AS;
1196
116
      }
1197
116
      return nullptr;
1198
116
    }
1199
1200
131
    static const Stmt *getLastStmt(const CFGBlock &B) {
1201
131
      if (const Stmt *Term = B.getTerminatorStmt())
1202
18
        return Term;
1203
113
      for (CFGBlock::const_reverse_iterator ElemIt = B.rbegin(),
1204
113
                                            ElemEnd = B.rend();
1205
113
                                            ElemIt != ElemEnd; 
++ElemIt0
) {
1206
107
        if (Optional<CFGStmt> CS = ElemIt->getAs<CFGStmt>())
1207
107
          return CS->getStmt();
1208
107
      }
1209
      // Workaround to detect a statement thrown out by CFGBuilder:
1210
      //   case X: {} case Y:
1211
      //   case X: ; case Y:
1212
6
      if (const SwitchCase *SW = dyn_cast_or_null<SwitchCase>(B.getLabel()))
1213
4
        if (!isa<SwitchCase>(SW->getSubStmt()))
1214
4
          return SW->getSubStmt();
1215
1216
2
      return nullptr;
1217
2
    }
1218
1219
    bool FoundSwitchStatements;
1220
    AttrStmts FallthroughStmts;
1221
    Sema &S;
1222
    llvm::SmallPtrSet<const CFGBlock *, 16> ReachableBlocks;
1223
  };
1224
} // anonymous namespace
1225
1226
static StringRef getFallthroughAttrSpelling(Preprocessor &PP,
1227
65
                                            SourceLocation Loc) {
1228
65
  TokenValue FallthroughTokens[] = {
1229
65
    tok::l_square, tok::l_square,
1230
65
    PP.getIdentifierInfo("fallthrough"),
1231
65
    tok::r_square, tok::r_square
1232
65
  };
1233
1234
65
  TokenValue ClangFallthroughTokens[] = {
1235
65
    tok::l_square, tok::l_square, PP.getIdentifierInfo("clang"),
1236
65
    tok::coloncolon, PP.getIdentifierInfo("fallthrough"),
1237
65
    tok::r_square, tok::r_square
1238
65
  };
1239
1240
65
  bool PreferClangAttr = !PP.getLangOpts().CPlusPlus17 && 
!PP.getLangOpts().C2x39
;
1241
1242
65
  StringRef MacroName;
1243
65
  if (PreferClangAttr)
1244
38
    MacroName = PP.getLastMacroWithSpelling(Loc, ClangFallthroughTokens);
1245
65
  if (MacroName.empty())
1246
58
    MacroName = PP.getLastMacroWithSpelling(Loc, FallthroughTokens);
1247
65
  if (MacroName.empty() && 
!PreferClangAttr50
)
1248
22
    MacroName = PP.getLastMacroWithSpelling(Loc, ClangFallthroughTokens);
1249
65
  if (MacroName.empty()) {
1250
40
    if (!PreferClangAttr)
1251
12
      MacroName = "[[fallthrough]]";
1252
28
    else if (PP.getLangOpts().CPlusPlus)
1253
21
      MacroName = "[[clang::fallthrough]]";
1254
7
    else
1255
7
      MacroName = "__attribute__((fallthrough))";
1256
40
  }
1257
65
  return MacroName;
1258
65
}
1259
1260
static void DiagnoseSwitchLabelsFallthrough(Sema &S, AnalysisDeclContext &AC,
1261
120
                                            bool PerFunction) {
1262
120
  FallthroughMapper FM(S);
1263
120
  FM.TraverseStmt(AC.getBody());
1264
1265
120
  if (!FM.foundSwitchStatements())
1266
43
    return;
1267
1268
77
  if (PerFunction && 
FM.getFallthroughStmts().empty()16
)
1269
1
    return;
1270
1271
76
  CFG *Cfg = AC.getCFG();
1272
1273
76
  if (!Cfg)
1274
0
    return;
1275
1276
76
  FM.fillReachableBlocks(Cfg);
1277
1278
647
  for (const CFGBlock *B : llvm::reverse(*Cfg)) {
1279
647
    const Stmt *Label = B->getLabel();
1280
1281
647
    if (!Label || 
!isa<SwitchCase>(Label)264
)
1282
386
      continue;
1283
1284
261
    int AnnotatedCnt;
1285
1286
261
    bool IsTemplateInstantiation = false;
1287
261
    if (const FunctionDecl *Function = dyn_cast<FunctionDecl>(AC.getDecl()))
1288
258
      IsTemplateInstantiation = Function->isTemplateInstantiation();
1289
261
    if (!FM.checkFallThroughIntoBlock(*B, AnnotatedCnt,
1290
261
                                      IsTemplateInstantiation))
1291
179
      continue;
1292
1293
82
    S.Diag(Label->getBeginLoc(),
1294
10
           PerFunction ? diag::warn_unannotated_fallthrough_per_function
1295
72
                       : diag::warn_unannotated_fallthrough);
1296
1297
82
    if (!AnnotatedCnt) {
1298
81
      SourceLocation L = Label->getBeginLoc();
1299
81
      if (L.isMacroID())
1300
10
        continue;
1301
1302
71
      const Stmt *Term = B->getTerminatorStmt();
1303
      // Skip empty cases.
1304
115
      while (B->empty() && 
!Term50
&&
B->succ_size() == 144
) {
1305
44
        B = *B->succ_begin();
1306
44
        Term = B->getTerminatorStmt();
1307
44
      }
1308
71
      if (!(B->empty() && 
Term6
&&
isa<BreakStmt>(Term)6
)) {
1309
65
        Preprocessor &PP = S.getPreprocessor();
1310
65
        StringRef AnnotationSpelling = getFallthroughAttrSpelling(PP, L);
1311
65
        SmallString<64> TextToInsert(AnnotationSpelling);
1312
65
        TextToInsert += "; ";
1313
65
        S.Diag(L, diag::note_insert_fallthrough_fixit)
1314
65
            << AnnotationSpelling
1315
65
            << FixItHint::CreateInsertion(L, TextToInsert);
1316
65
      }
1317
71
      S.Diag(L, diag::note_insert_break_fixit)
1318
71
          << FixItHint::CreateInsertion(L, "break; ");
1319
71
    }
1320
82
  }
1321
1322
76
  for (const auto *F : FM.getFallthroughStmts())
1323
16
    S.Diag(F->getBeginLoc(), diag::err_fallthrough_attr_invalid_placement);
1324
76
}
1325
1326
static bool isInLoop(const ASTContext &Ctx, const ParentMap &PM,
1327
69
                     const Stmt *S) {
1328
69
  assert(S);
1329
1330
417
  do {
1331
417
    switch (S->getStmtClass()) {
1332
18
    case Stmt::ForStmtClass:
1333
18
    case Stmt::WhileStmtClass:
1334
18
    case Stmt::CXXForRangeStmtClass:
1335
18
    case Stmt::ObjCForCollectionStmtClass:
1336
18
      return true;
1337
4
    case Stmt::DoStmtClass: {
1338
4
      Expr::EvalResult Result;
1339
4
      if (!cast<DoStmt>(S)->getCond()->EvaluateAsInt(Result, Ctx))
1340
2
        return true;
1341
2
      return Result.Val.getInt().getBoolValue();
1342
2
    }
1343
395
    default:
1344
395
      break;
1345
395
    }
1346
395
  } while ((S = PM.getParent(S)));
1347
1348
47
  return false;
1349
69
}
1350
1351
static void diagnoseRepeatedUseOfWeak(Sema &S,
1352
                                      const sema::FunctionScopeInfo *CurFn,
1353
                                      const Decl *D,
1354
112
                                      const ParentMap &PM) {
1355
112
  typedef sema::FunctionScopeInfo::WeakObjectProfileTy WeakObjectProfileTy;
1356
112
  typedef sema::FunctionScopeInfo::WeakObjectUseMap WeakObjectUseMap;
1357
112
  typedef sema::FunctionScopeInfo::WeakUseVector WeakUseVector;
1358
112
  typedef std::pair<const Stmt *, WeakObjectUseMap::const_iterator>
1359
112
  StmtUsesPair;
1360
1361
112
  ASTContext &Ctx = S.getASTContext();
1362
1363
112
  const WeakObjectUseMap &WeakMap = CurFn->getWeakObjectUses();
1364
1365
  // Extract all weak objects that are referenced more than once.
1366
112
  SmallVector<StmtUsesPair, 8> UsesByStmt;
1367
112
  for (WeakObjectUseMap::const_iterator I = WeakMap.begin(), E = WeakMap.end();
1368
267
       I != E; 
++I155
) {
1369
155
    const WeakUseVector &Uses = I->second;
1370
1371
    // Find the first read of the weak object.
1372
155
    WeakUseVector::const_iterator UI = Uses.begin(), UE = Uses.end();
1373
217
    for ( ; UI != UE; 
++UI62
) {
1374
197
      if (UI->isUnsafe())
1375
135
        break;
1376
197
    }
1377
1378
    // If there were only writes to this object, don't warn.
1379
155
    if (UI == UE)
1380
20
      continue;
1381
1382
    // If there was only one read, followed by any number of writes, and the
1383
    // read is not within a loop, don't warn. Additionally, don't warn in a
1384
    // loop if the base object is a local variable -- local variables are often
1385
    // changed in loops.
1386
135
    if (UI == Uses.begin()) {
1387
117
      WeakUseVector::const_iterator UI2 = UI;
1388
143
      for (++UI2; UI2 != UE; 
++UI226
)
1389
74
        if (UI2->isUnsafe())
1390
48
          break;
1391
1392
117
      if (UI2 == UE) {
1393
69
        if (!isInLoop(Ctx, PM, UI->getUseExpr()))
1394
49
          continue;
1395
1396
20
        const WeakObjectProfileTy &Profile = I->first;
1397
20
        if (!Profile.isExactProfile())
1398
2
          continue;
1399
1400
18
        const NamedDecl *Base = Profile.getBase();
1401
18
        if (!Base)
1402
2
          Base = Profile.getProperty();
1403
18
        assert(Base && "A profile always has a base or property.");
1404
1405
18
        if (const VarDecl *BaseVar = dyn_cast<VarDecl>(Base))
1406
18
          if (BaseVar->hasLocalStorage() && 
!isa<ParmVarDecl>(Base)16
)
1407
2
            continue;
1408
82
      }
1409
117
    }
1410
1411
82
    UsesByStmt.push_back(StmtUsesPair(UI->getUseExpr(), I));
1412
82
  }
1413
1414
112
  if (UsesByStmt.empty())
1415
46
    return;
1416
1417
  // Sort by first use so that we emit the warnings in a deterministic order.
1418
66
  SourceManager &SM = S.getSourceManager();
1419
66
  llvm::sort(UsesByStmt,
1420
22
             [&SM](const StmtUsesPair &LHS, const StmtUsesPair &RHS) {
1421
22
               return SM.isBeforeInTranslationUnit(LHS.first->getBeginLoc(),
1422
22
                                                   RHS.first->getBeginLoc());
1423
22
             });
1424
1425
  // Classify the current code body for better warning text.
1426
  // This enum should stay in sync with the cases in
1427
  // warn_arc_repeated_use_of_weak and warn_arc_possible_repeated_use_of_weak.
1428
  // FIXME: Should we use a common classification enum and the same set of
1429
  // possibilities all throughout Sema?
1430
66
  enum {
1431
66
    Function,
1432
66
    Method,
1433
66
    Block,
1434
66
    Lambda
1435
66
  } FunctionKind;
1436
1437
66
  if (isa<sema::BlockScopeInfo>(CurFn))
1438
2
    FunctionKind = Block;
1439
64
  else if (isa<sema::LambdaScopeInfo>(CurFn))
1440
0
    FunctionKind = Lambda;
1441
64
  else if (isa<ObjCMethodDecl>(D))
1442
6
    FunctionKind = Method;
1443
58
  else
1444
58
    FunctionKind = Function;
1445
1446
  // Iterate through the sorted problems and emit warnings for each.
1447
82
  for (const auto &P : UsesByStmt) {
1448
82
    const Stmt *FirstRead = P.first;
1449
82
    const WeakObjectProfileTy &Key = P.second->first;
1450
82
    const WeakUseVector &Uses = P.second->second;
1451
1452
    // For complicated expressions like 'a.b.c' and 'x.b.c', WeakObjectProfileTy
1453
    // may not contain enough information to determine that these are different
1454
    // properties. We can only be 100% sure of a repeated use in certain cases,
1455
    // and we adjust the diagnostic kind accordingly so that the less certain
1456
    // case can be turned off if it is too noisy.
1457
82
    unsigned DiagKind;
1458
82
    if (Key.isExactProfile())
1459
68
      DiagKind = diag::warn_arc_repeated_use_of_weak;
1460
14
    else
1461
14
      DiagKind = diag::warn_arc_possible_repeated_use_of_weak;
1462
1463
    // Classify the weak object being accessed for better warning text.
1464
    // This enum should stay in sync with the cases in
1465
    // warn_arc_repeated_use_of_weak and warn_arc_possible_repeated_use_of_weak.
1466
82
    enum {
1467
82
      Variable,
1468
82
      Property,
1469
82
      ImplicitProperty,
1470
82
      Ivar
1471
82
    } ObjectKind;
1472
1473
82
    const NamedDecl *KeyProp = Key.getProperty();
1474
82
    if (isa<VarDecl>(KeyProp))
1475
6
      ObjectKind = Variable;
1476
76
    else if (isa<ObjCPropertyDecl>(KeyProp))
1477
64
      ObjectKind = Property;
1478
12
    else if (isa<ObjCMethodDecl>(KeyProp))
1479
4
      ObjectKind = ImplicitProperty;
1480
8
    else if (isa<ObjCIvarDecl>(KeyProp))
1481
8
      ObjectKind = Ivar;
1482
8
    else
1483
0
      llvm_unreachable("Unexpected weak object kind!");
1484
1485
    // Do not warn about IBOutlet weak property receivers being set to null
1486
    // since they are typically only used from the main thread.
1487
82
    if (const ObjCPropertyDecl *Prop = dyn_cast<ObjCPropertyDecl>(KeyProp))
1488
64
      if (Prop->hasAttr<IBOutletAttr>())
1489
4
        continue;
1490
1491
    // Show the first time the object was read.
1492
78
    S.Diag(FirstRead->getBeginLoc(), DiagKind)
1493
78
        << int(ObjectKind) << KeyProp << int(FunctionKind)
1494
78
        << FirstRead->getSourceRange();
1495
1496
    // Print all the other accesses as notes.
1497
164
    for (const auto &Use : Uses) {
1498
164
      if (Use.getUseExpr() == FirstRead)
1499
78
        continue;
1500
86
      S.Diag(Use.getUseExpr()->getBeginLoc(),
1501
86
             diag::note_arc_weak_also_accessed_here)
1502
86
          << Use.getUseExpr()->getSourceRange();
1503
86
    }
1504
78
  }
1505
66
}
1506
1507
namespace {
1508
class UninitValsDiagReporter : public UninitVariablesHandler {
1509
  Sema &S;
1510
  typedef SmallVector<UninitUse, 2> UsesVec;
1511
  typedef llvm::PointerIntPair<UsesVec *, 1, bool> MappedType;
1512
  // Prefer using MapVector to DenseMap, so that iteration order will be
1513
  // the same as insertion order. This is needed to obtain a deterministic
1514
  // order of diagnostics when calling flushDiagnostics().
1515
  typedef llvm::MapVector<const VarDecl *, MappedType> UsesMap;
1516
  UsesMap uses;
1517
  UsesMap constRefUses;
1518
1519
public:
1520
40.9k
  UninitValsDiagReporter(Sema &S) : S(S) {}
1521
40.9k
  ~UninitValsDiagReporter() override { flushDiagnostics(); }
1522
1523
2.12k
  MappedType &getUses(UsesMap &um, const VarDecl *vd) {
1524
2.12k
    MappedType &V = um[vd];
1525
2.12k
    if (!V.getPointer())
1526
1.11k
      V.setPointer(new UsesVec());
1527
2.12k
    return V;
1528
2.12k
  }
1529
1530
  void handleUseOfUninitVariable(const VarDecl *vd,
1531
2.09k
                                 const UninitUse &use) override {
1532
2.09k
    getUses(uses, vd).getPointer()->push_back(use);
1533
2.09k
  }
1534
1535
  void handleConstRefUseOfUninitVariable(const VarDecl *vd,
1536
15
                                         const UninitUse &use) override {
1537
15
    getUses(constRefUses, vd).getPointer()->push_back(use);
1538
15
  }
1539
1540
9
  void handleSelfInit(const VarDecl *vd) override {
1541
9
    getUses(uses, vd).setInt(true);
1542
9
    getUses(constRefUses, vd).setInt(true);
1543
9
  }
1544
1545
40.9k
  void flushDiagnostics() {
1546
1.09k
    for (const auto &P : uses) {
1547
1.09k
      const VarDecl *vd = P.first;
1548
1.09k
      const MappedType &V = P.second;
1549
1550
1.09k
      UsesVec *vec = V.getPointer();
1551
1.09k
      bool hasSelfInit = V.getInt();
1552
1553
      // Specially handle the case where we have uses of an uninitialized
1554
      // variable, but the root cause is an idiomatic self-init.  We want
1555
      // to report the diagnostic at the self-init since that is the root cause.
1556
1.09k
      if (!vec->empty() && 
hasSelfInit1.08k
&&
hasAlwaysUninitializedUse(vec)4
)
1557
2
        DiagnoseUninitializedUse(S, vd,
1558
2
                                 UninitUse(vd->getInit()->IgnoreParenCasts(),
1559
2
                                           /* isAlwaysUninit */ true),
1560
2
                                 /* alwaysReportSelfInit */ true);
1561
1.09k
      else {
1562
        // Sort the uses by their SourceLocations.  While not strictly
1563
        // guaranteed to produce them in line/column order, this will provide
1564
        // a stable ordering.
1565
1.09k
        llvm::sort(vec->begin(), vec->end(),
1566
1.00k
                   [](const UninitUse &a, const UninitUse &b) {
1567
          // Prefer a more confident report over a less confident one.
1568
1.00k
          if (a.getKind() != b.getKind())
1569
5
            return a.getKind() > b.getKind();
1570
1.00k
          return a.getUser()->getBeginLoc() < b.getUser()->getBeginLoc();
1571
1.00k
        });
1572
1573
1.08k
        for (const auto &U : *vec) {
1574
          // If we have self-init, downgrade all uses to 'may be uninitialized'.
1575
1.08k
          UninitUse Use = hasSelfInit ? 
UninitUse(U.getUser(), false)2
: U;
1576
1577
1.08k
          if (DiagnoseUninitializedUse(S, vd, Use))
1578
            // Skip further diagnostics for this variable. We try to warn only
1579
            // on the first point at which a variable is used uninitialized.
1580
1.08k
            break;
1581
1.08k
        }
1582
1.09k
      }
1583
1584
      // Release the uses vector.
1585
1.09k
      delete vec;
1586
1.09k
    }
1587
1588
40.9k
    uses.clear();
1589
1590
    // Flush all const reference uses diags.
1591
23
    for (const auto &P : constRefUses) {
1592
23
      const VarDecl *vd = P.first;
1593
23
      const MappedType &V = P.second;
1594
1595
23
      UsesVec *vec = V.getPointer();
1596
23
      bool hasSelfInit = V.getInt();
1597
1598
23
      if (!vec->empty() && 
hasSelfInit15
&&
hasAlwaysUninitializedUse(vec)1
)
1599
1
        DiagnoseUninitializedUse(S, vd,
1600
1
                                 UninitUse(vd->getInit()->IgnoreParenCasts(),
1601
1
                                           /* isAlwaysUninit */ true),
1602
1
                                 /* alwaysReportSelfInit */ true);
1603
22
      else {
1604
14
        for (const auto &U : *vec) {
1605
14
          if (DiagnoseUninitializedConstRefUse(S, vd, U))
1606
14
            break;
1607
14
        }
1608
22
      }
1609
1610
      // Release the uses vector.
1611
23
      delete vec;
1612
23
    }
1613
1614
40.9k
    constRefUses.clear();
1615
40.9k
  }
1616
1617
private:
1618
5
  static bool hasAlwaysUninitializedUse(const UsesVec* vec) {
1619
5
    return std::any_of(vec->begin(), vec->end(), [](const UninitUse &U) {
1620
5
      return U.getKind() == UninitUse::Always ||
1621
2
             U.getKind() == UninitUse::AfterCall ||
1622
2
             U.getKind() == UninitUse::AfterDecl;
1623
5
    });
1624
5
  }
1625
};
1626
} // anonymous namespace
1627
1628
namespace clang {
1629
namespace {
1630
typedef SmallVector<PartialDiagnosticAt, 1> OptionalNotes;
1631
typedef std::pair<PartialDiagnosticAt, OptionalNotes> DelayedDiag;
1632
typedef std::list<DelayedDiag> DiagList;
1633
1634
struct SortDiagBySourceLocation {
1635
  SourceManager &SM;
1636
2.26k
  SortDiagBySourceLocation(SourceManager &SM) : SM(SM) {}
1637
1638
2.50k
  bool operator()(const DelayedDiag &left, const DelayedDiag &right) {
1639
    // Although this call will be slow, this is only called when outputting
1640
    // multiple warnings.
1641
2.50k
    return SM.isBeforeInTranslationUnit(left.first.first, right.first.first);
1642
2.50k
  }
1643
};
1644
} // anonymous namespace
1645
} // namespace clang
1646
1647
//===----------------------------------------------------------------------===//
1648
// -Wthread-safety
1649
//===----------------------------------------------------------------------===//
1650
namespace clang {
1651
namespace threadSafety {
1652
namespace {
1653
class ThreadSafetyReporter : public clang::threadSafety::ThreadSafetyHandler {
1654
  Sema &S;
1655
  DiagList Warnings;
1656
  SourceLocation FunLocation, FunEndLocation;
1657
1658
  const FunctionDecl *CurrentFunction;
1659
  bool Verbose;
1660
1661
2.41k
  OptionalNotes getNotes() const {
1662
2.41k
    if (Verbose && 
CurrentFunction14
) {
1663
14
      PartialDiagnosticAt FNote(CurrentFunction->getBody()->getBeginLoc(),
1664
14
                                S.PDiag(diag::note_thread_warning_in_fun)
1665
14
                                    << CurrentFunction);
1666
14
      return OptionalNotes(1, FNote);
1667
14
    }
1668
2.40k
    return OptionalNotes();
1669
2.40k
  }
1670
1671
430
  OptionalNotes getNotes(const PartialDiagnosticAt &Note) const {
1672
430
    OptionalNotes ONS(1, Note);
1673
430
    if (Verbose && 
CurrentFunction6
) {
1674
6
      PartialDiagnosticAt FNote(CurrentFunction->getBody()->getBeginLoc(),
1675
6
                                S.PDiag(diag::note_thread_warning_in_fun)
1676
6
                                    << CurrentFunction);
1677
6
      ONS.push_back(std::move(FNote));
1678
6
    }
1679
430
    return ONS;
1680
430
  }
1681
1682
  OptionalNotes getNotes(const PartialDiagnosticAt &Note1,
1683
0
                         const PartialDiagnosticAt &Note2) const {
1684
0
    OptionalNotes ONS;
1685
0
    ONS.push_back(Note1);
1686
0
    ONS.push_back(Note2);
1687
0
    if (Verbose && CurrentFunction) {
1688
0
      PartialDiagnosticAt FNote(CurrentFunction->getBody()->getBeginLoc(),
1689
0
                                S.PDiag(diag::note_thread_warning_in_fun)
1690
0
                                    << CurrentFunction);
1691
0
      ONS.push_back(std::move(FNote));
1692
0
    }
1693
0
    return ONS;
1694
0
  }
1695
1696
274
  OptionalNotes makeLockedHereNote(SourceLocation LocLocked, StringRef Kind) {
1697
274
    return LocLocked.isValid()
1698
274
               ? getNotes(PartialDiagnosticAt(
1699
274
                     LocLocked, S.PDiag(diag::note_locked_here) << Kind))
1700
0
               : getNotes();
1701
274
  }
1702
1703
  OptionalNotes makeUnlockedHereNote(SourceLocation LocUnlocked,
1704
100
                                     StringRef Kind) {
1705
100
    return LocUnlocked.isValid()
1706
54
               ? getNotes(PartialDiagnosticAt(
1707
54
                     LocUnlocked, S.PDiag(diag::note_unlocked_here) << Kind))
1708
46
               : getNotes();
1709
100
  }
1710
1711
 public:
1712
  ThreadSafetyReporter(Sema &S, SourceLocation FL, SourceLocation FEL)
1713
    : S(S), FunLocation(FL), FunEndLocation(FEL),
1714
2.17k
      CurrentFunction(nullptr), Verbose(false) {}
1715
1716
41
  void setVerbose(bool b) { Verbose = b; }
1717
1718
  /// Emit all buffered diagnostics in order of sourcelocation.
1719
  /// We need to output diagnostics produced while iterating through
1720
  /// the lockset in deterministic order, so this function orders diagnostics
1721
  /// and outputs them.
1722
2.17k
  void emitDiagnostics() {
1723
2.17k
    Warnings.sort(SortDiagBySourceLocation(S.getSourceManager()));
1724
2.84k
    for (const auto &Diag : Warnings) {
1725
2.84k
      S.Diag(Diag.first.first, Diag.first.second);
1726
2.84k
      for (const auto &Note : Diag.second)
1727
450
        S.Diag(Note.first, Note.second);
1728
2.84k
    }
1729
2.17k
  }
1730
1731
0
  void handleInvalidLockExp(StringRef Kind, SourceLocation Loc) override {
1732
0
    PartialDiagnosticAt Warning(Loc, S.PDiag(diag::warn_cannot_resolve_lock)
1733
0
                                         << Loc);
1734
0
    Warnings.emplace_back(std::move(Warning), getNotes());
1735
0
  }
1736
1737
  void handleUnmatchedUnlock(StringRef Kind, Name LockName, SourceLocation Loc,
1738
100
                             SourceLocation LocPreviousUnlock) override {
1739
100
    if (Loc.isInvalid())
1740
0
      Loc = FunLocation;
1741
100
    PartialDiagnosticAt Warning(Loc, S.PDiag(diag::warn_unlock_but_no_lock)
1742
100
                                         << Kind << LockName);
1743
100
    Warnings.emplace_back(std::move(Warning),
1744
100
                          makeUnlockedHereNote(LocPreviousUnlock, Kind));
1745
100
  }
1746
1747
  void handleIncorrectUnlockKind(StringRef Kind, Name LockName,
1748
                                 LockKind Expected, LockKind Received,
1749
                                 SourceLocation LocLocked,
1750
18
                                 SourceLocation LocUnlock) override {
1751
18
    if (LocUnlock.isInvalid())
1752
0
      LocUnlock = FunLocation;
1753
18
    PartialDiagnosticAt Warning(
1754
18
        LocUnlock, S.PDiag(diag::warn_unlock_kind_mismatch)
1755
18
                       << Kind << LockName << Received << Expected);
1756
18
    Warnings.emplace_back(std::move(Warning),
1757
18
                          makeLockedHereNote(LocLocked, Kind));
1758
18
  }
1759
1760
  void handleDoubleLock(StringRef Kind, Name LockName, SourceLocation LocLocked,
1761
79
                        SourceLocation LocDoubleLock) override {
1762
79
    if (LocDoubleLock.isInvalid())
1763
0
      LocDoubleLock = FunLocation;
1764
79
    PartialDiagnosticAt Warning(LocDoubleLock, S.PDiag(diag::warn_double_lock)
1765
79
                                                   << Kind << LockName);
1766
79
    Warnings.emplace_back(std::move(Warning),
1767
79
                          makeLockedHereNote(LocLocked, Kind));
1768
79
  }
1769
1770
  void handleMutexHeldEndOfScope(StringRef Kind, Name LockName,
1771
                                 SourceLocation LocLocked,
1772
                                 SourceLocation LocEndOfScope,
1773
177
                                 LockErrorKind LEK) override {
1774
177
    unsigned DiagID = 0;
1775
177
    switch (LEK) {
1776
46
      case LEK_LockedSomePredecessors:
1777
46
        DiagID = diag::warn_lock_some_predecessors;
1778
46
        break;
1779
34
      case LEK_LockedSomeLoopIterations:
1780
34
        DiagID = diag::warn_expecting_lock_held_on_loop;
1781
34
        break;
1782
65
      case LEK_LockedAtEndOfFunction:
1783
65
        DiagID = diag::warn_no_unlock;
1784
65
        break;
1785
32
      case LEK_NotLockedAtEndOfFunction:
1786
32
        DiagID = diag::warn_expecting_locked;
1787
32
        break;
1788
177
    }
1789
177
    if (LocEndOfScope.isInvalid())
1790
127
      LocEndOfScope = FunEndLocation;
1791
1792
177
    PartialDiagnosticAt Warning(LocEndOfScope, S.PDiag(DiagID) << Kind
1793
177
                                                               << LockName);
1794
177
    Warnings.emplace_back(std::move(Warning),
1795
177
                          makeLockedHereNote(LocLocked, Kind));
1796
177
  }
1797
1798
  void handleExclusiveAndShared(StringRef Kind, Name LockName,
1799
                                SourceLocation Loc1,
1800
20
                                SourceLocation Loc2) override {
1801
20
    PartialDiagnosticAt Warning(Loc1,
1802
20
                                S.PDiag(diag::warn_lock_exclusive_and_shared)
1803
20
                                    << Kind << LockName);
1804
20
    PartialDiagnosticAt Note(Loc2, S.PDiag(diag::note_lock_exclusive_and_shared)
1805
20
                                       << Kind << LockName);
1806
20
    Warnings.emplace_back(std::move(Warning), getNotes(Note));
1807
20
  }
1808
1809
  void handleNoMutexHeld(StringRef Kind, const NamedDecl *D,
1810
                         ProtectedOperationKind POK, AccessKind AK,
1811
34
                         SourceLocation Loc) override {
1812
34
    assert((POK == POK_VarAccess || POK == POK_VarDereference) &&
1813
34
           "Only works for variables");
1814
34
    unsigned DiagID = POK == POK_VarAccess?
1815
25
                        diag::warn_variable_requires_any_lock:
1816
9
                        diag::warn_var_deref_requires_any_lock;
1817
34
    PartialDiagnosticAt Warning(Loc, S.PDiag(DiagID)
1818
34
      << D << getLockKindFromAccessKind(AK));
1819
34
    Warnings.emplace_back(std::move(Warning), getNotes());
1820
34
  }
1821
1822
  void handleMutexNotHeld(StringRef Kind, const NamedDecl *D,
1823
                          ProtectedOperationKind POK, Name LockName,
1824
                          LockKind LK, SourceLocation Loc,
1825
1.18k
                          Name *PossibleMatch) override {
1826
1.18k
    unsigned DiagID = 0;
1827
1.18k
    if (PossibleMatch) {
1828
76
      switch (POK) {
1829
56
        case POK_VarAccess:
1830
56
          DiagID = diag::warn_variable_requires_lock_precise;
1831
56
          break;
1832
0
        case POK_VarDereference:
1833
0
          DiagID = diag::warn_var_deref_requires_lock_precise;
1834
0
          break;
1835
20
        case POK_FunctionCall:
1836
20
          DiagID = diag::warn_fun_requires_lock_precise;
1837
20
          break;
1838
0
        case POK_PassByRef:
1839
0
          DiagID = diag::warn_guarded_pass_by_reference;
1840
0
          break;
1841
0
        case POK_PtPassByRef:
1842
0
          DiagID = diag::warn_pt_guarded_pass_by_reference;
1843
0
          break;
1844
76
      }
1845
76
      PartialDiagnosticAt Warning(Loc, S.PDiag(DiagID) << Kind
1846
76
                                                       << D
1847
76
                                                       << LockName << LK);
1848
76
      PartialDiagnosticAt Note(Loc, S.PDiag(diag::note_found_mutex_near_match)
1849
76
                                        << *PossibleMatch);
1850
76
      if (Verbose && 
POK == POK_VarAccess0
) {
1851
0
        PartialDiagnosticAt VNote(D->getLocation(),
1852
0
                                  S.PDiag(diag::note_guarded_by_declared_here)
1853
0
                                      << D->getDeclName());
1854
0
        Warnings.emplace_back(std::move(Warning), getNotes(Note, VNote));
1855
0
      } else
1856
76
        Warnings.emplace_back(std::move(Warning), getNotes(Note));
1857
1.10k
    } else {
1858
1.10k
      switch (POK) {
1859
586
        case POK_VarAccess:
1860
586
          DiagID = diag::warn_variable_requires_lock;
1861
586
          break;
1862
204
        case POK_VarDereference:
1863
204
          DiagID = diag::warn_var_deref_requires_lock;
1864
204
          break;
1865
206
        case POK_FunctionCall:
1866
206
          DiagID = diag::warn_fun_requires_lock;
1867
206
          break;
1868
88
        case POK_PassByRef:
1869
88
          DiagID = diag::warn_guarded_pass_by_reference;
1870
88
          break;
1871
20
        case POK_PtPassByRef:
1872
20
          DiagID = diag::warn_pt_guarded_pass_by_reference;
1873
20
          break;
1874
1.10k
      }
1875
1.10k
      PartialDiagnosticAt Warning(Loc, S.PDiag(DiagID) << Kind
1876
1.10k
                                                       << D
1877
1.10k
                                                       << LockName << LK);
1878
1.10k
      if (Verbose && 
POK == POK_VarAccess12
) {
1879
6
        PartialDiagnosticAt Note(D->getLocation(),
1880
6
                                 S.PDiag(diag::note_guarded_by_declared_here));
1881
6
        Warnings.emplace_back(std::move(Warning), getNotes(Note));
1882
6
      } else
1883
1.09k
        Warnings.emplace_back(std::move(Warning), getNotes());
1884
1.10k
    }
1885
1.18k
  }
1886
1887
  void handleNegativeNotHeld(StringRef Kind, Name LockName, Name Neg,
1888
1.08k
                             SourceLocation Loc) override {
1889
1.08k
    PartialDiagnosticAt Warning(Loc,
1890
1.08k
        S.PDiag(diag::warn_acquire_requires_negative_cap)
1891
1.08k
        << Kind << LockName << Neg);
1892
1.08k
    Warnings.emplace_back(std::move(Warning), getNotes());
1893
1.08k
  }
1894
1895
  void handleNegativeNotHeld(const NamedDecl *D, Name LockName,
1896
14
                             SourceLocation Loc) override {
1897
14
    PartialDiagnosticAt Warning(
1898
14
        Loc, S.PDiag(diag::warn_fun_requires_negative_cap) << D << LockName);
1899
14
    Warnings.emplace_back(std::move(Warning), getNotes());
1900
14
  }
1901
1902
  void handleFunExcludesLock(StringRef Kind, Name FunName, Name LockName,
1903
77
                             SourceLocation Loc) override {
1904
77
    PartialDiagnosticAt Warning(Loc, S.PDiag(diag::warn_fun_excludes_mutex)
1905
77
                                         << Kind << FunName << LockName);
1906
77
    Warnings.emplace_back(std::move(Warning), getNotes());
1907
77
  }
1908
1909
  void handleLockAcquiredBefore(StringRef Kind, Name L1Name, Name L2Name,
1910
49
                                SourceLocation Loc) override {
1911
49
    PartialDiagnosticAt Warning(Loc,
1912
49
      S.PDiag(diag::warn_acquired_before) << Kind << L1Name << L2Name);
1913
49
    Warnings.emplace_back(std::move(Warning), getNotes());
1914
49
  }
1915
1916
20
  void handleBeforeAfterCycle(Name L1Name, SourceLocation Loc) override {
1917
20
    PartialDiagnosticAt Warning(Loc,
1918
20
      S.PDiag(diag::warn_acquired_before_after_cycle) << L1Name);
1919
20
    Warnings.emplace_back(std::move(Warning), getNotes());
1920
20
  }
1921
1922
2.05k
  void enterFunction(const FunctionDecl* FD) override {
1923
2.05k
    CurrentFunction = FD;
1924
2.05k
  }
1925
1926
1.99k
  void leaveFunction(const FunctionDecl* FD) override {
1927
1.99k
    CurrentFunction = nullptr;
1928
1.99k
  }
1929
};
1930
} // anonymous namespace
1931
} // namespace threadSafety
1932
} // namespace clang
1933
1934
//===----------------------------------------------------------------------===//
1935
// -Wconsumed
1936
//===----------------------------------------------------------------------===//
1937
1938
namespace clang {
1939
namespace consumed {
1940
namespace {
1941
class ConsumedWarningsHandler : public ConsumedWarningsHandlerBase {
1942
1943
  Sema &S;
1944
  DiagList Warnings;
1945
1946
public:
1947
1948
97
  ConsumedWarningsHandler(Sema &S) : S(S) {}
1949
1950
94
  void emitDiagnostics() override {
1951
94
    Warnings.sort(SortDiagBySourceLocation(S.getSourceManager()));
1952
110
    for (const auto &Diag : Warnings) {
1953
110
      S.Diag(Diag.first.first, Diag.first.second);
1954
110
      for (const auto &Note : Diag.second)
1955
0
        S.Diag(Note.first, Note.second);
1956
110
    }
1957
94
  }
1958
1959
  void warnLoopStateMismatch(SourceLocation Loc,
1960
2
                             StringRef VariableName) override {
1961
2
    PartialDiagnosticAt Warning(Loc, S.PDiag(diag::warn_loop_state_mismatch) <<
1962
2
      VariableName);
1963
1964
2
    Warnings.emplace_back(std::move(Warning), OptionalNotes());
1965
2
  }
1966
1967
  void warnParamReturnTypestateMismatch(SourceLocation Loc,
1968
                                        StringRef VariableName,
1969
                                        StringRef ExpectedState,
1970
2
                                        StringRef ObservedState) override {
1971
1972
2
    PartialDiagnosticAt Warning(Loc, S.PDiag(
1973
2
      diag::warn_param_return_typestate_mismatch) << VariableName <<
1974
2
        ExpectedState << ObservedState);
1975
1976
2
    Warnings.emplace_back(std::move(Warning), OptionalNotes());
1977
2
  }
1978
1979
  void warnParamTypestateMismatch(SourceLocation Loc, StringRef ExpectedState,
1980
6
                                  StringRef ObservedState) override {
1981
1982
6
    PartialDiagnosticAt Warning(Loc, S.PDiag(
1983
6
      diag::warn_param_typestate_mismatch) << ExpectedState << ObservedState);
1984
1985
6
    Warnings.emplace_back(std::move(Warning), OptionalNotes());
1986
6
  }
1987
1988
  void warnReturnTypestateForUnconsumableType(SourceLocation Loc,
1989
1
                                              StringRef TypeName) override {
1990
1
    PartialDiagnosticAt Warning(Loc, S.PDiag(
1991
1
      diag::warn_return_typestate_for_unconsumable_type) << TypeName);
1992
1993
1
    Warnings.emplace_back(std::move(Warning), OptionalNotes());
1994
1
  }
1995
1996
  void warnReturnTypestateMismatch(SourceLocation Loc, StringRef ExpectedState,
1997
1
                                   StringRef ObservedState) override {
1998
1999
1
    PartialDiagnosticAt Warning(Loc, S.PDiag(
2000
1
      diag::warn_return_typestate_mismatch) << ExpectedState << ObservedState);
2001
2002
1
    Warnings.emplace_back(std::move(Warning), OptionalNotes());
2003
1
  }
2004
2005
  void warnUseOfTempInInvalidState(StringRef MethodName, StringRef State,
2006
4
                                   SourceLocation Loc) override {
2007
2008
4
    PartialDiagnosticAt Warning(Loc, S.PDiag(
2009
4
      diag::warn_use_of_temp_in_invalid_state) << MethodName << State);
2010
2011
4
    Warnings.emplace_back(std::move(Warning), OptionalNotes());
2012
4
  }
2013
2014
  void warnUseInInvalidState(StringRef MethodName, StringRef VariableName,
2015
94
                             StringRef State, SourceLocation Loc) override {
2016
2017
94
    PartialDiagnosticAt Warning(Loc, S.PDiag(diag::warn_use_in_invalid_state) <<
2018
94
                                MethodName << VariableName << State);
2019
2020
94
    Warnings.emplace_back(std::move(Warning), OptionalNotes());
2021
94
  }
2022
};
2023
} // anonymous namespace
2024
} // namespace consumed
2025
} // namespace clang
2026
2027
//===----------------------------------------------------------------------===//
2028
// AnalysisBasedWarnings - Worker object used by Sema to execute analysis-based
2029
//  warnings on a function, method, or block.
2030
//===----------------------------------------------------------------------===//
2031
2032
77.9k
clang::sema::AnalysisBasedWarnings::Policy::Policy() {
2033
77.9k
  enableCheckFallThrough = 1;
2034
77.9k
  enableCheckUnreachable = 0;
2035
77.9k
  enableThreadSafetyAnalysis = 0;
2036
77.9k
  enableConsumedAnalysis = 0;
2037
77.9k
}
2038
2039
467k
static unsigned isEnabled(DiagnosticsEngine &D, unsigned diag) {
2040
467k
  return (unsigned)!D.isIgnored(diag, SourceLocation());
2041
467k
}
2042
2043
clang::sema::AnalysisBasedWarnings::AnalysisBasedWarnings(Sema &s)
2044
  : S(s),
2045
    NumFunctionsAnalyzed(0),
2046
    NumFunctionsWithBadCFGs(0),
2047
    NumCFGBlocks(0),
2048
    MaxCFGBlocksPerFunction(0),
2049
    NumUninitAnalysisFunctions(0),
2050
    NumUninitAnalysisVariables(0),
2051
    MaxUninitAnalysisVariablesPerFunction(0),
2052
    NumUninitAnalysisBlockVisits(0),
2053
77.9k
    MaxUninitAnalysisBlockVisitsPerFunction(0) {
2054
2055
77.9k
  using namespace diag;
2056
77.9k
  DiagnosticsEngine &D = S.getDiagnostics();
2057
2058
77.9k
  DefaultPolicy.enableCheckUnreachable =
2059
77.9k
    isEnabled(D, warn_unreachable) ||
2060
77.9k
    isEnabled(D, warn_unreachable_break) ||
2061
77.9k
    isEnabled(D, warn_unreachable_return) ||
2062
77.9k
    isEnabled(D, warn_unreachable_loop_increment);
2063
2064
77.9k
  DefaultPolicy.enableThreadSafetyAnalysis =
2065
77.9k
    isEnabled(D, warn_double_lock);
2066
2067
77.9k
  DefaultPolicy.enableConsumedAnalysis =
2068
77.9k
    isEnabled(D, warn_use_in_invalid_state);
2069
77.9k
}
2070
2071
536
static void flushDiagnostics(Sema &S, const sema::FunctionScopeInfo *fscope) {
2072
536
  for (const auto &D : fscope->PossiblyUnreachableDiags)
2073
0
    S.Diag(D.Loc, D.PD);
2074
536
}
2075
2076
void clang::sema::
2077
AnalysisBasedWarnings::IssueWarnings(sema::AnalysisBasedWarnings::Policy P,
2078
                                     sema::FunctionScopeInfo *fscope,
2079
2.89M
                                     const Decl *D, QualType BlockType) {
2080
2081
  // We avoid doing analysis-based warnings when there are errors for
2082
  // two reasons:
2083
  // (1) The CFGs often can't be constructed (if the body is invalid), so
2084
  //     don't bother trying.
2085
  // (2) The code already has problems; running the analysis just takes more
2086
  //     time.
2087
2.89M
  DiagnosticsEngine &Diags = S.getDiagnostics();
2088
2089
  // Do not do any analysis if we are going to just ignore them.
2090
2.89M
  if (Diags.getIgnoreAllWarnings() ||
2091
2.74M
      (Diags.getSuppressSystemWarnings() &&
2092
2.74M
       S.SourceMgr.isInSystemHeader(D->getLocation())))
2093
2.67M
    return;
2094
2095
  // For code in dependent contexts, we'll do this at instantiation time.
2096
219k
  if (cast<DeclContext>(D)->isDependentContext())
2097
10.3k
    return;
2098
2099
209k
  if (S.hasUncompilableErrorOccurred()) {
2100
    // Flush out any possibly unreachable diagnostics.
2101
536
    flushDiagnostics(S, fscope);
2102
536
    return;
2103
536
  }
2104
2105
208k
  const Stmt *Body = D->getBody();
2106
208k
  assert(Body);
2107
2108
  // Construct the analysis context with the specified CFG build options.
2109
208k
  AnalysisDeclContext AC(/* AnalysisDeclContextManager */ nullptr, D);
2110
2111
  // Don't generate EH edges for CallExprs as we'd like to avoid the n^2
2112
  // explosion for destructors that can result and the compile time hit.
2113
208k
  AC.getCFGBuildOptions().PruneTriviallyFalseEdges = true;
2114
208k
  AC.getCFGBuildOptions().AddEHEdges = false;
2115
208k
  AC.getCFGBuildOptions().AddInitializers = true;
2116
208k
  AC.getCFGBuildOptions().AddImplicitDtors = true;
2117
208k
  AC.getCFGBuildOptions().AddTemporaryDtors = true;
2118
208k
  AC.getCFGBuildOptions().AddCXXNewAllocator = false;
2119
208k
  AC.getCFGBuildOptions().AddCXXDefaultInitExprInCtors = true;
2120
2121
  // Force that certain expressions appear as CFGElements in the CFG.  This
2122
  // is used to speed up various analyses.
2123
  // FIXME: This isn't the right factoring.  This is here for initial
2124
  // prototyping, but we need a way for analyses to say what expressions they
2125
  // expect to always be CFGElements and then fill in the BuildOptions
2126
  // appropriately.  This is essentially a layering violation.
2127
208k
  if (P.enableCheckUnreachable || 
P.enableThreadSafetyAnalysis208k
||
2128
206k
      P.enableConsumedAnalysis) {
2129
    // Unreachable code analysis and thread safety require a linearized CFG.
2130
2.38k
    AC.getCFGBuildOptions().setAllAlwaysAdd();
2131
2.38k
  }
2132
206k
  else {
2133
206k
    AC.getCFGBuildOptions()
2134
206k
      .setAlwaysAdd(Stmt::BinaryOperatorClass)
2135
206k
      .setAlwaysAdd(Stmt::CompoundAssignOperatorClass)
2136
206k
      .setAlwaysAdd(Stmt::BlockExprClass)
2137
206k
      .setAlwaysAdd(Stmt::CStyleCastExprClass)
2138
206k
      .setAlwaysAdd(Stmt::DeclRefExprClass)
2139
206k
      .setAlwaysAdd(Stmt::ImplicitCastExprClass)
2140
206k
      .setAlwaysAdd(Stmt::UnaryOperatorClass)
2141
206k
      .setAlwaysAdd(Stmt::AttributedStmtClass);
2142
206k
  }
2143
2144
  // Install the logical handler.
2145
208k
  llvm::Optional<LogicalErrorHandler> LEH;
2146
208k
  if (LogicalErrorHandler::hasActiveDiagnostics(Diags, D->getBeginLoc())) {
2147
26.7k
    LEH.emplace(S);
2148
26.7k
    AC.getCFGBuildOptions().Observer = &*LEH;
2149
26.7k
  }
2150
2151
  // Emit delayed diagnostics.
2152
208k
  if (!fscope->PossiblyUnreachableDiags.empty()) {
2153
3.13k
    bool analyzed = false;
2154
2155
    // Register the expressions with the CFGBuilder.
2156
8.67k
    for (const auto &D : fscope->PossiblyUnreachableDiags) {
2157
8.67k
      for (const Stmt *S : D.Stmts)
2158
8.98k
        AC.registerForcedBlockExpression(S);
2159
8.67k
    }
2160
2161
3.13k
    if (AC.getCFG()) {
2162
3.13k
      analyzed = true;
2163
8.67k
      for (const auto &D : fscope->PossiblyUnreachableDiags) {
2164
8.67k
        bool AllReachable = true;
2165
8.98k
        for (const Stmt *S : D.Stmts) {
2166
8.98k
          const CFGBlock *block = AC.getBlockForRegisteredExpression(S);
2167
8.98k
          CFGReverseBlockReachabilityAnalysis *cra =
2168
8.98k
              AC.getCFGReachablityAnalysis();
2169
          // FIXME: We should be able to assert that block is non-null, but
2170
          // the CFG analysis can skip potentially-evaluated expressions in
2171
          // edge cases; see test/Sema/vla-2.c.
2172
8.98k
          if (block && 
cra3.18k
) {
2173
            // Can this block be reached from the entrance?
2174
3.18k
            if (!cra->isReachable(&AC.getCFG()->getEntry(), block)) {
2175
67
              AllReachable = false;
2176
67
              break;
2177
67
            }
2178
3.18k
          }
2179
          // If we cannot map to a basic block, assume the statement is
2180
          // reachable.
2181
8.98k
        }
2182
2183
8.67k
        if (AllReachable)
2184
8.60k
          S.Diag(D.Loc, D.PD);
2185
8.67k
      }
2186
3.13k
    }
2187
2188
3.13k
    if (!analyzed)
2189
0
      flushDiagnostics(S, fscope);
2190
3.13k
  }
2191
2192
  // Warning: check missing 'return'
2193
208k
  if (P.enableCheckFallThrough) {
2194
202k
    const CheckFallThroughDiagnostics &CD =
2195
202k
        (isa<BlockDecl>(D)
2196
1.99k
             ? CheckFallThroughDiagnostics::MakeForBlock()
2197
200k
             : (isa<CXXMethodDecl>(D) &&
2198
48.8k
                cast<CXXMethodDecl>(D)->getOverloadedOperator() == OO_Call &&
2199
3.06k
                cast<CXXMethodDecl>(D)->getParent()->isLambda())
2200
2.98k
                   ? CheckFallThroughDiagnostics::MakeForLambda()
2201
197k
                   : (fscope->isCoroutine()
2202
82
                          ? CheckFallThroughDiagnostics::MakeForCoroutine(D)
2203
197k
                          : CheckFallThroughDiagnostics::MakeForFunction(D)));
2204
202k
    CheckFallThroughForBody(S, D, Body, BlockType, CD, AC, fscope);
2205
202k
  }
2206
2207
  // Warning: check for unreachable code
2208
208k
  if (P.enableCheckUnreachable) {
2209
    // Only check for unreachable code on non-template instantiations.
2210
    // Different template instantiations can effectively change the control-flow
2211
    // and it is very difficult to prove that a snippet of code in a template
2212
    // is unreachable for all instantiations.
2213
163
    bool isTemplateInstantiation = false;
2214
163
    if (const FunctionDecl *Function = dyn_cast<FunctionDecl>(D))
2215
159
      isTemplateInstantiation = Function->isTemplateInstantiation();
2216
163
    if (!isTemplateInstantiation)
2217
159
      CheckUnreachable(S, AC);
2218
163
  }
2219
2220
  // Check for thread safety violations
2221
208k
  if (P.enableThreadSafetyAnalysis) {
2222
2.17k
    SourceLocation FL = AC.getDecl()->getLocation();
2223
2.17k
    SourceLocation FEL = AC.getDecl()->getEndLoc();
2224
2.17k
    threadSafety::ThreadSafetyReporter Reporter(S, FL, FEL);
2225
2.17k
    if (!Diags.isIgnored(diag::warn_thread_safety_beta, D->getBeginLoc()))
2226
2.09k
      Reporter.setIssueBetaWarnings(true);
2227
2.17k
    if (!Diags.isIgnored(diag::warn_thread_safety_verbose, D->getBeginLoc()))
2228
41
      Reporter.setVerbose(true);
2229
2230
2.17k
    threadSafety::runThreadSafetyAnalysis(AC, Reporter,
2231
2.17k
                                          &S.ThreadSafetyDeclCache);
2232
2.17k
    Reporter.emitDiagnostics();
2233
2.17k
  }
2234
2235
  // Check for violations of consumed properties.
2236
208k
  if (P.enableConsumedAnalysis) {
2237
97
    consumed::ConsumedWarningsHandler WarningHandler(S);
2238
97
    consumed::ConsumedAnalyzer Analyzer(WarningHandler);
2239
97
    Analyzer.run(AC);
2240
97
  }
2241
2242
208k
  if (!Diags.isIgnored(diag::warn_uninit_var, D->getBeginLoc()) ||
2243
168k
      !Diags.isIgnored(diag::warn_sometimes_uninit_var, D->getBeginLoc()) ||
2244
168k
      !Diags.isIgnored(diag::warn_maybe_uninit_var, D->getBeginLoc()) ||
2245
168k
      !Diags.isIgnored(diag::warn_uninit_const_reference, D->getBeginLoc())) {
2246
40.9k
    if (CFG *cfg = AC.getCFG()) {
2247
40.9k
      UninitValsDiagReporter reporter(S);
2248
40.9k
      UninitVariablesAnalysisStats stats;
2249
40.9k
      std::memset(&stats, 0, sizeof(UninitVariablesAnalysisStats));
2250
40.9k
      runUninitializedVariablesAnalysis(*cast<DeclContext>(D), *cfg, AC,
2251
40.9k
                                        reporter, stats);
2252
2253
40.9k
      if (S.CollectStats && 
stats.NumVariablesAnalyzed > 00
) {
2254
0
        ++NumUninitAnalysisFunctions;
2255
0
        NumUninitAnalysisVariables += stats.NumVariablesAnalyzed;
2256
0
        NumUninitAnalysisBlockVisits += stats.NumBlockVisits;
2257
0
        MaxUninitAnalysisVariablesPerFunction =
2258
0
            std::max(MaxUninitAnalysisVariablesPerFunction,
2259
0
                     stats.NumVariablesAnalyzed);
2260
0
        MaxUninitAnalysisBlockVisitsPerFunction =
2261
0
            std::max(MaxUninitAnalysisBlockVisitsPerFunction,
2262
0
                     stats.NumBlockVisits);
2263
0
      }
2264
40.9k
    }
2265
40.9k
  }
2266
2267
208k
  bool FallThroughDiagFull =
2268
208k
      !Diags.isIgnored(diag::warn_unannotated_fallthrough, D->getBeginLoc());
2269
208k
  bool FallThroughDiagPerFunction = !Diags.isIgnored(
2270
208k
      diag::warn_unannotated_fallthrough_per_function, D->getBeginLoc());
2271
208k
  if (FallThroughDiagFull || 
FallThroughDiagPerFunction208k
||
2272
208k
      fscope->HasFallthroughStmt) {
2273
120
    DiagnoseSwitchLabelsFallthrough(S, AC, !FallThroughDiagFull);
2274
120
  }
2275
2276
208k
  if (S.getLangOpts().ObjCWeak &&
2277
1.38k
      !Diags.isIgnored(diag::warn_arc_repeated_use_of_weak, D->getBeginLoc()))
2278
112
    diagnoseRepeatedUseOfWeak(S, fscope, D, AC.getParentMap());
2279
2280
2281
  // Check for infinite self-recursion in functions
2282
208k
  if (!Diags.isIgnored(diag::warn_infinite_recursive_function,
2283
26.7k
                       D->getBeginLoc())) {
2284
26.7k
    if (const FunctionDecl *FD = dyn_cast<FunctionDecl>(D)) {
2285
26.7k
      checkRecursiveFunction(S, FD, Body, AC);
2286
26.7k
    }
2287
26.7k
  }
2288
2289
  // Check for throw out of non-throwing function.
2290
208k
  if (!Diags.isIgnored(diag::warn_throw_in_noexcept_func, D->getBeginLoc()))
2291
208k
    if (const FunctionDecl *FD = dyn_cast<FunctionDecl>(D))
2292
201k
      if (S.getLangOpts().CPlusPlus && 
isNoexcept(FD)126k
)
2293
7.33k
        checkThrowInNonThrowingFunc(S, FD, AC);
2294
2295
  // If none of the previous checks caused a CFG build, trigger one here
2296
  // for the logical error handler.
2297
208k
  if (LogicalErrorHandler::hasActiveDiagnostics(Diags, D->getBeginLoc())) {
2298
26.7k
    AC.getCFG();
2299
26.7k
  }
2300
2301
  // Collect statistics about the CFG if it was built.
2302
208k
  if (S.CollectStats && 
AC.isCFGBuilt()3
) {
2303
3
    ++NumFunctionsAnalyzed;
2304
3
    if (CFG *cfg = AC.getCFG()) {
2305
      // If we successfully built a CFG for this context, record some more
2306
      // detail information about it.
2307
3
      NumCFGBlocks += cfg->getNumBlockIDs();
2308
3
      MaxCFGBlocksPerFunction = std::max(MaxCFGBlocksPerFunction,
2309
3
                                         cfg->getNumBlockIDs());
2310
0
    } else {
2311
0
      ++NumFunctionsWithBadCFGs;
2312
0
    }
2313
3
  }
2314
208k
}
2315
2316
3
void clang::sema::AnalysisBasedWarnings::PrintStats() const {
2317
3
  llvm::errs() << "\n*** Analysis Based Warnings Stats:\n";
2318
2319
3
  unsigned NumCFGsBuilt = NumFunctionsAnalyzed - NumFunctionsWithBadCFGs;
2320
3
  unsigned AvgCFGBlocksPerFunction =
2321
3
      !NumCFGsBuilt ? 
00
: NumCFGBlocks/NumCFGsBuilt;
2322
3
  llvm::errs() << NumFunctionsAnalyzed << " functions analyzed ("
2323
3
               << NumFunctionsWithBadCFGs << " w/o CFGs).\n"
2324
3
               << "  " << NumCFGBlocks << " CFG blocks built.\n"
2325
3
               << "  " << AvgCFGBlocksPerFunction
2326
3
               << " average CFG blocks per function.\n"
2327
3
               << "  " << MaxCFGBlocksPerFunction
2328
3
               << " max CFG blocks per function.\n";
2329
2330
3
  unsigned AvgUninitVariablesPerFunction = !NumUninitAnalysisFunctions ? 0
2331
0
      : NumUninitAnalysisVariables/NumUninitAnalysisFunctions;
2332
3
  unsigned AvgUninitBlockVisitsPerFunction = !NumUninitAnalysisFunctions ? 0
2333
0
      : NumUninitAnalysisBlockVisits/NumUninitAnalysisFunctions;
2334
3
  llvm::errs() << NumUninitAnalysisFunctions
2335
3
               << " functions analyzed for uninitialiazed variables\n"
2336
3
               << "  " << NumUninitAnalysisVariables << " variables analyzed.\n"
2337
3
               << "  " << AvgUninitVariablesPerFunction
2338
3
               << " average variables per function.\n"
2339
3
               << "  " << MaxUninitAnalysisVariablesPerFunction
2340
3
               << " max variables per function.\n"
2341
3
               << "  " << NumUninitAnalysisBlockVisits << " block visits.\n"
2342
3
               << "  " << AvgUninitBlockVisitsPerFunction
2343
3
               << " average block visits per function.\n"
2344
3
               << "  " << MaxUninitAnalysisBlockVisitsPerFunction
2345
3
               << " max block visits per function.\n";
2346
3
}