Coverage Report

Created: 2019-07-24 05:18

/Users/buildslave/jenkins/workspace/clang-stage2-coverage-R/llvm/tools/clang/lib/Sema/AnalysisBasedWarnings.cpp
Line
Count
Source (jump to first uncovered line)
1
//=- AnalysisBasedWarnings.cpp - Sema warnings based on libAnalysis -*- C++ -*-=//
2
//
3
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
4
// See https://llvm.org/LICENSE.txt for license information.
5
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
6
//
7
//===----------------------------------------------------------------------===//
8
//
9
// This file defines analysis_warnings::[Policy,Executor].
10
// Together they are used by Sema to issue warnings based on inexpensive
11
// static analysis algorithms in libAnalysis.
12
//
13
//===----------------------------------------------------------------------===//
14
15
#include "clang/Sema/AnalysisBasedWarnings.h"
16
#include "clang/AST/DeclCXX.h"
17
#include "clang/AST/DeclObjC.h"
18
#include "clang/AST/EvaluatedExprVisitor.h"
19
#include "clang/AST/ExprCXX.h"
20
#include "clang/AST/ExprObjC.h"
21
#include "clang/AST/ParentMap.h"
22
#include "clang/AST/RecursiveASTVisitor.h"
23
#include "clang/AST/StmtCXX.h"
24
#include "clang/AST/StmtObjC.h"
25
#include "clang/AST/StmtVisitor.h"
26
#include "clang/Analysis/Analyses/CFGReachabilityAnalysis.h"
27
#include "clang/Analysis/Analyses/Consumed.h"
28
#include "clang/Analysis/Analyses/ReachableCode.h"
29
#include "clang/Analysis/Analyses/ThreadSafety.h"
30
#include "clang/Analysis/Analyses/UninitializedValues.h"
31
#include "clang/Analysis/AnalysisDeclContext.h"
32
#include "clang/Analysis/CFG.h"
33
#include "clang/Analysis/CFGStmtMap.h"
34
#include "clang/Basic/SourceLocation.h"
35
#include "clang/Basic/SourceManager.h"
36
#include "clang/Lex/Preprocessor.h"
37
#include "clang/Sema/ScopeInfo.h"
38
#include "clang/Sema/SemaInternal.h"
39
#include "llvm/ADT/BitVector.h"
40
#include "llvm/ADT/MapVector.h"
41
#include "llvm/ADT/SmallString.h"
42
#include "llvm/ADT/SmallVector.h"
43
#include "llvm/ADT/StringRef.h"
44
#include "llvm/Support/Casting.h"
45
#include <algorithm>
46
#include <deque>
47
#include <iterator>
48
49
using namespace clang;
50
51
//===----------------------------------------------------------------------===//
52
// Unreachable code analysis.
53
//===----------------------------------------------------------------------===//
54
55
namespace {
56
  class UnreachableCodeHandler : public reachable_code::Callback {
57
    Sema &S;
58
    SourceRange PreviousSilenceableCondVal;
59
60
  public:
61
155
    UnreachableCodeHandler(Sema &s) : S(s) {}
62
63
    void HandleUnreachable(reachable_code::UnreachableKind UK,
64
                           SourceLocation L,
65
                           SourceRange SilenceableCondVal,
66
                           SourceRange R1,
67
169
                           SourceRange R2) override {
68
169
      // Avoid reporting multiple unreachable code diagnostics that are
69
169
      // triggered by the same conditional value.
70
169
      if (PreviousSilenceableCondVal.isValid() &&
71
169
          
SilenceableCondVal.isValid()24
&&
72
169
          
PreviousSilenceableCondVal == SilenceableCondVal24
)
73
2
        return;
74
167
      PreviousSilenceableCondVal = SilenceableCondVal;
75
167
76
167
      unsigned diag = diag::warn_unreachable;
77
167
      switch (UK) {
78
167
        case reachable_code::UK_Break:
79
16
          diag = diag::warn_unreachable_break;
80
16
          break;
81
167
        case reachable_code::UK_Return:
82
32
          diag = diag::warn_unreachable_return;
83
32
          break;
84
167
        case reachable_code::UK_Loop_Increment:
85
3
          diag = diag::warn_unreachable_loop_increment;
86
3
          break;
87
167
        case reachable_code::UK_Other:
88
116
          break;
89
167
      }
90
167
91
167
      S.Diag(L, diag) << R1 << R2;
92
167
93
167
      SourceLocation Open = SilenceableCondVal.getBegin();
94
167
      if (Open.isValid()) {
95
42
        SourceLocation Close = SilenceableCondVal.getEnd();
96
42
        Close = S.getLocForEndOfToken(Close);
97
42
        if (Close.isValid()) {
98
42
          S.Diag(Open, diag::note_unreachable_silence)
99
42
            << FixItHint::CreateInsertion(Open, "/* DISABLES CODE */ (")
100
42
            << FixItHint::CreateInsertion(Close, ")");
101
42
        }
102
42
      }
103
167
    }
104
  };
105
} // anonymous namespace
106
107
/// CheckUnreachable - Check for unreachable code.
108
157
static void CheckUnreachable(Sema &S, AnalysisDeclContext &AC) {
109
157
  // As a heuristic prune all diagnostics not in the main file.  Currently
110
157
  // the majority of warnings in headers are false positives.  These
111
157
  // are largely caused by configuration state, e.g. preprocessor
112
157
  // defined code, etc.
113
157
  //
114
157
  // Note that this is also a performance optimization.  Analyzing
115
157
  // headers many times can be expensive.
116
157
  if (!S.getSourceManager().isInMainFile(AC.getDecl()->getBeginLoc()))
117
2
    return;
118
155
119
155
  UnreachableCodeHandler UC(S);
120
155
  reachable_code::FindUnreachableCode(AC, S.getPreprocessor(), UC);
121
155
}
122
123
namespace {
124
/// Warn on logical operator errors in CFGBuilder
125
class LogicalErrorHandler : public CFGCallback {
126
  Sema &S;
127
128
public:
129
42
  LogicalErrorHandler(Sema &S) : CFGCallback(), S(S) {}
130
131
502
  static bool HasMacroID(const Expr *E) {
132
502
    if (E->getExprLoc().isMacroID())
133
3
      return true;
134
499
135
499
    // Recurse to children.
136
499
    for (const Stmt *SubStmt : E->children())
137
444
      if (const Expr *SubExpr = dyn_cast_or_null<Expr>(SubStmt))
138
444
        if (HasMacroID(SubExpr))
139
8
          return true;
140
499
141
499
    
return false491
;
142
499
  }
143
144
47
  void compareAlwaysTrue(const BinaryOperator *B, bool isAlwaysTrue) override {
145
47
    if (HasMacroID(B))
146
1
      return;
147
46
148
46
    SourceRange DiagRange = B->getSourceRange();
149
46
    S.Diag(B->getExprLoc(), diag::warn_tautological_overlap_comparison)
150
46
        << DiagRange << isAlwaysTrue;
151
46
  }
152
153
  void compareBitwiseEquality(const BinaryOperator *B,
154
11
                              bool isAlwaysTrue) override {
155
11
    if (HasMacroID(B))
156
2
      return;
157
9
158
9
    SourceRange DiagRange = B->getSourceRange();
159
9
    S.Diag(B->getExprLoc(), diag::warn_comparison_bitwise_always)
160
9
        << DiagRange << isAlwaysTrue;
161
9
  }
162
};
163
} // anonymous namespace
164
165
//===----------------------------------------------------------------------===//
166
// Check for infinite self-recursion in functions
167
//===----------------------------------------------------------------------===//
168
169
// Returns true if the function is called anywhere within the CFGBlock.
170
// For member functions, the additional condition of being call from the
171
// this pointer is required.
172
701k
static bool hasRecursiveCallInPath(const FunctionDecl *FD, CFGBlock &Block) {
173
701k
  // Process all the Stmt's in this block to find any calls to FD.
174
5.75M
  for (const auto &B : Block) {
175
5.75M
    if (B.getKind() != CFGElement::Statement)
176
82.4k
      continue;
177
5.67M
178
5.67M
    const CallExpr *CE = dyn_cast<CallExpr>(B.getAs<CFGStmt>()->getStmt());
179
5.67M
    if (!CE || 
!CE->getCalleeDecl()468k
||
180
5.67M
        
CE->getCalleeDecl()->getCanonicalDecl() != FD468k
)
181
5.67M
      continue;
182
104
183
104
    // Skip function calls which are qualified with a templated class.
184
104
    if (const DeclRefExpr *DRE =
185
61
            dyn_cast<DeclRefExpr>(CE->getCallee()->IgnoreParenImpCasts())) {
186
61
      if (NestedNameSpecifier *NNS = DRE->getQualifier()) {
187
1
        if (NNS->getKind() == NestedNameSpecifier::TypeSpec &&
188
1
            isa<TemplateSpecializationType>(NNS->getAsType())) {
189
1
          continue;
190
1
        }
191
103
      }
192
61
    }
193
103
194
103
    const CXXMemberCallExpr *MCE = dyn_cast<CXXMemberCallExpr>(CE);
195
103
    if (!MCE || 
isa<CXXThisExpr>(MCE->getImplicitObjectArgument())43
||
196
103
        
!MCE->getMethodDecl()->isVirtual()17
)
197
103
      return true;
198
103
  }
199
701k
  
return false701k
;
200
701k
}
201
202
// Returns true if every path from the entry block passes through a call to FD.
203
297k
static bool checkForRecursiveFunctionCall(const FunctionDecl *FD, CFG *cfg) {
204
297k
  llvm::SmallPtrSet<CFGBlock *, 16> Visited;
205
297k
  llvm::SmallVector<CFGBlock *, 16> WorkList;
206
297k
  // Keep track of whether we found at least one recursive path.
207
297k
  bool foundRecursion = false;
208
297k
209
297k
  const unsigned ExitID = cfg->getExit().getBlockID();
210
297k
211
297k
  // Seed the work list with the entry block.
212
297k
  WorkList.push_back(&cfg->getEntry());
213
297k
214
840k
  while (!WorkList.empty()) {
215
840k
    CFGBlock *Block = WorkList.pop_back_val();
216
840k
217
1.62M
    for (auto I = Block->succ_begin(), E = Block->succ_end(); I != E; 
++I789k
) {
218
1.08M
      if (CFGBlock *SuccBlock = *I) {
219
1.02M
        if (!Visited.insert(SuccBlock).second)
220
26.6k
          continue;
221
998k
222
998k
        // Found a path to the exit node without a recursive call.
223
998k
        if (ExitID == SuccBlock->getBlockID())
224
297k
          return false;
225
701k
226
701k
        // If the successor block contains a recursive call, end analysis there.
227
701k
        if (hasRecursiveCallInPath(FD, *SuccBlock)) {
228
103
          foundRecursion = true;
229
103
          continue;
230
103
        }
231
701k
232
701k
        WorkList.push_back(SuccBlock);
233
701k
      }
234
1.08M
    }
235
840k
  }
236
297k
  
return foundRecursion13
;
237
297k
}
238
239
/// Emit -Winfinite-recursion when every path through FD's body makes a call
/// back into FD.
static void checkRecursiveFunction(Sema &S, const FunctionDecl *FD,
                                   const Stmt *Body, AnalysisDeclContext &AC) {
  FD = FD->getCanonicalDecl();

  // Only run on non-templated functions and non-templated members of
  // templated classes.
  if (FD->getTemplatedKind() != FunctionDecl::TK_NonTemplate &&
      FD->getTemplatedKind() != FunctionDecl::TK_MemberSpecialization)
    return;

  CFG *cfg = AC.getCFG();
  if (!cfg)
    return;

  // If the exit block is unreachable, skip processing the function.
  if (cfg->getExit().pred_empty())
    return;

  // Emit diagnostic if a recursive function call is detected for all paths.
  if (checkForRecursiveFunctionCall(FD, cfg))
    S.Diag(Body->getBeginLoc(), diag::warn_infinite_recursive_function);
}
260
261
//===----------------------------------------------------------------------===//
262
// Check for throw in a non-throwing function.
263
//===----------------------------------------------------------------------===//
264
265
/// Determine whether an exception thrown by E, unwinding from ThrowBlock,
266
/// can reach ExitBlock.
267
static bool throwEscapes(Sema &S, const CXXThrowExpr *E, CFGBlock &ThrowBlock,
268
81
                         CFG *Body) {
269
81
  SmallVector<CFGBlock *, 16> Stack;
270
81
  llvm::BitVector Queued(Body->getNumBlockIDs());
271
81
272
81
  Stack.push_back(&ThrowBlock);
273
81
  Queued[ThrowBlock.getBlockID()] = true;
274
81
275
173
  while (!Stack.empty()) {
276
138
    CFGBlock &UnwindBlock = *Stack.back();
277
138
    Stack.pop_back();
278
138
279
160
    for (auto &Succ : UnwindBlock.succs()) {
280
160
      if (!Succ.isReachable() || Queued[Succ->getBlockID()])
281
0
        continue;
282
160
283
160
      if (Succ->getBlockID() == Body->getExit().getBlockID())
284
46
        return true;
285
114
286
114
      if (auto *Catch =
287
57
              dyn_cast_or_null<CXXCatchStmt>(Succ->getLabel())) {
288
57
        QualType Caught = Catch->getCaughtType();
289
57
        if (Caught.isNull() || // catch (...) catches everything
290
57
            
!E->getSubExpr()53
|| // throw; is considered cuaght by any handler
291
57
            
S.handlerCanCatch(Caught, E->getSubExpr()->getType())50
)
292
35
          // Exception doesn't escape via this path.
293
35
          break;
294
57
      } else {
295
57
        Stack.push_back(Succ);
296
57
        Queued[Succ->getBlockID()] = true;
297
57
      }
298
114
    }
299
138
  }
300
81
301
81
  
return false35
;
302
81
}
303
304
static void visitReachableThrows(
305
    CFG *BodyCFG,
306
14.4k
    llvm::function_ref<void(const CXXThrowExpr *, CFGBlock &)> Visit) {
307
14.4k
  llvm::BitVector Reachable(BodyCFG->getNumBlockIDs());
308
14.4k
  clang::reachable_code::ScanReachableFromBlock(&BodyCFG->getEntry(), Reachable);
309
45.8k
  for (CFGBlock *B : *BodyCFG) {
310
45.8k
    if (!Reachable[B->getBlockID()])
311
1.38k
      continue;
312
101k
    
for (CFGElement &E : *B)44.4k
{
313
101k
      Optional<CFGStmt> S = E.getAs<CFGStmt>();
314
101k
      if (!S)
315
923
        continue;
316
100k
      if (auto *Throw = dyn_cast<CXXThrowExpr>(S->getStmt()))
317
81
        Visit(Throw, *B);
318
100k
    }
319
44.4k
  }
320
14.4k
}
321
322
static void EmitDiagForCXXThrowInNonThrowingFunc(Sema &S, SourceLocation OpLoc,
323
46
                                                 const FunctionDecl *FD) {
324
46
  if (!S.getSourceManager().isInSystemHeader(OpLoc) &&
325
46
      FD->getTypeSourceInfo()) {
326
46
    S.Diag(OpLoc, diag::warn_throw_in_noexcept_func) << FD;
327
46
    if (S.getLangOpts().CPlusPlus11 &&
328
46
        (isa<CXXDestructorDecl>(FD) ||
329
46
         
FD->getDeclName().getCXXOverloadedOperator() == OO_Delete35
||
330
46
         
FD->getDeclName().getCXXOverloadedOperator() == OO_Array_Delete34
)) {
331
12
      if (const auto *Ty = FD->getTypeSourceInfo()->getType()->
332
12
                                         getAs<FunctionProtoType>())
333
12
        S.Diag(FD->getLocation(), diag::note_throw_in_dtor)
334
12
            << !isa<CXXDestructorDecl>(FD) << !Ty->hasExceptionSpec()
335
12
            << FD->getExceptionSpecSourceRange();
336
12
    } else
337
34
      S.Diag(FD->getLocation(), diag::note_throw_in_function)
338
34
          << FD->getExceptionSpecSourceRange();
339
46
  }
340
46
}
341
342
static void checkThrowInNonThrowingFunc(Sema &S, const FunctionDecl *FD,
343
14.4k
                                        AnalysisDeclContext &AC) {
344
14.4k
  CFG *BodyCFG = AC.getCFG();
345
14.4k
  if (!BodyCFG)
346
0
    return;
347
14.4k
  if (BodyCFG->getExit().pred_empty())
348
0
    return;
349
14.4k
  visitReachableThrows(BodyCFG, [&](const CXXThrowExpr *Throw, CFGBlock &Block) {
350
81
    if (throwEscapes(S, Throw, Block, BodyCFG))
351
46
      EmitDiagForCXXThrowInNonThrowingFunc(S, Throw->getThrowLoc(), FD);
352
81
  });
353
14.4k
}
354
355
372k
static bool isNoexcept(const FunctionDecl *FD) {
356
372k
  const auto *FPT = FD->getType()->castAs<FunctionProtoType>();
357
372k
  if (FPT->isNothrow() || 
FD->hasAttr<NoThrowAttr>()358k
)
358
14.4k
    return true;
359
358k
  return false;
360
358k
}
361
362
//===----------------------------------------------------------------------===//
363
// Check for missing return value.
364
//===----------------------------------------------------------------------===//
365
366
enum ControlFlowKind {
367
  UnknownFallThrough,
368
  NeverFallThrough,
369
  MaybeFallThrough,
370
  AlwaysFallThrough,
371
  NeverFallThroughOrReturn
372
};
373
374
/// CheckFallThrough - Check that we don't fall off the end of a
375
/// Statement that should return a value.
376
///
377
/// \returns AlwaysFallThrough iff we always fall off the end of the statement,
378
/// MaybeFallThrough iff we might or might not fall off the end,
379
/// NeverFallThroughOrReturn iff we never fall off the end of the statement or
380
/// return.  We assume NeverFallThrough iff we never fall off the end of the
381
/// statement but we may return.  We assume that functions not marked noreturn
382
/// will return.
383
219k
static ControlFlowKind CheckFallThrough(AnalysisDeclContext &AC) {
384
219k
  CFG *cfg = AC.getCFG();
385
219k
  if (!cfg) 
return UnknownFallThrough36
;
386
219k
387
219k
  // The CFG leaves in dead things, and we don't want the dead code paths to
388
219k
  // confuse us, so we mark all live things first.
389
219k
  llvm::BitVector live(cfg->getNumBlockIDs());
390
219k
  unsigned count = reachable_code::ScanReachableFromBlock(&cfg->getEntry(),
391
219k
                                                          live);
392
219k
393
219k
  bool AddEHEdges = AC.getAddEHEdges();
394
219k
  if (!AddEHEdges && count != cfg->getNumBlockIDs())
395
31.6k
    // When there are things remaining dead, and we didn't add EH edges
396
31.6k
    // from CallExprs to the catch clauses, we have to go back and
397
31.6k
    // mark them as live.
398
445k
    
for (const auto *B : *cfg)31.6k
{
399
445k
      if (!live[B->getBlockID()]) {
400
118k
        if (B->pred_begin() == B->pred_end()) {
401
32.4k
          const Stmt *Term = B->getTerminatorStmt();
402
32.4k
          if (Term && 
isa<CXXTryStmt>(Term)196
)
403
63
            // When not adding EH edges from calls, catch clauses
404
63
            // can otherwise seem dead.  Avoid noting them as dead.
405
63
            count += reachable_code::ScanReachableFromBlock(B, live);
406
32.4k
          continue;
407
32.4k
        }
408
118k
      }
409
445k
    }
410
219k
411
219k
  // Now we know what is live, we check the live precessors of the exit block
412
219k
  // and look for fall through paths, being careful to ignore normal returns,
413
219k
  // and exceptional paths.
414
219k
  bool HasLiveReturn = false;
415
219k
  bool HasFakeEdge = false;
416
219k
  bool HasPlainEdge = false;
417
219k
  bool HasAbnormalEdge = false;
418
219k
419
219k
  // Ignore default cases that aren't likely to be reachable because all
420
219k
  // enums in a switch(X) have explicit case statements.
421
219k
  CFGBlock::FilterOptions FO;
422
219k
  FO.IgnoreDefaultsWithCoveredEnums = 1;
423
219k
424
219k
  for (CFGBlock::filtered_pred_iterator I =
425
219k
           cfg->getExit().filtered_pred_start_end(FO);
426
570k
       I.hasMore(); 
++I350k
) {
427
350k
    const CFGBlock &B = **I;
428
350k
    if (!live[B.getBlockID()])
429
56.3k
      continue;
430
293k
431
293k
    // Skip blocks which contain an element marked as no-return. They don't
432
293k
    // represent actually viable edges into the exit block, so mark them as
433
293k
    // abnormal.
434
293k
    if (B.hasNoReturnElement()) {
435
25.1k
      HasAbnormalEdge = true;
436
25.1k
      continue;
437
25.1k
    }
438
268k
439
268k
    // Destructors can appear after the 'return' in the CFG.  This is
440
268k
    // normal.  We need to look pass the destructors for the return
441
268k
    // statement (if it exists).
442
268k
    CFGBlock::const_reverse_iterator ri = B.rbegin(), re = B.rend();
443
268k
444
278k
    for ( ; ri != re ; 
++ri9.58k
)
445
277k
      if (ri->getAs<CFGStmt>())
446
268k
        break;
447
268k
448
268k
    // No more CFGElements in the block?
449
268k
    if (ri == re) {
450
377
      const Stmt *Term = B.getTerminatorStmt();
451
377
      if (Term && 
isa<CXXTryStmt>(Term)55
) {
452
51
        HasAbnormalEdge = true;
453
51
        continue;
454
51
      }
455
326
      // A labeled empty statement, or the entry block...
456
326
      HasPlainEdge = true;
457
326
      continue;
458
326
    }
459
268k
460
268k
    CFGStmt CS = ri->castAs<CFGStmt>();
461
268k
    const Stmt *S = CS.getStmt();
462
268k
    if (isa<ReturnStmt>(S) || 
isa<CoreturnStmt>(S)286
) {
463
268k
      HasLiveReturn = true;
464
268k
      continue;
465
268k
    }
466
274
    if (isa<ObjCAtThrowStmt>(S)) {
467
2
      HasFakeEdge = true;
468
2
      continue;
469
2
    }
470
272
    if (isa<CXXThrowExpr>(S)) {
471
46
      HasFakeEdge = true;
472
46
      continue;
473
46
    }
474
226
    if (isa<MSAsmStmt>(S)) {
475
13
      // TODO: Verify this is correct.
476
13
      HasFakeEdge = true;
477
13
      HasLiveReturn = true;
478
13
      continue;
479
13
    }
480
213
    if (isa<CXXTryStmt>(S)) {
481
0
      HasAbnormalEdge = true;
482
0
      continue;
483
0
    }
484
213
    if (std::find(B.succ_begin(), B.succ_end(), &cfg->getExit())
485
213
        == B.succ_end()) {
486
0
      HasAbnormalEdge = true;
487
0
      continue;
488
0
    }
489
213
490
213
    HasPlainEdge = true;
491
213
  }
492
219k
  if (!HasPlainEdge) {
493
219k
    if (HasLiveReturn)
494
217k
      return NeverFallThrough;
495
1.50k
    return NeverFallThroughOrReturn;
496
1.50k
  }
497
532
  if (HasAbnormalEdge || 
HasFakeEdge514
||
HasLiveReturn514
)
498
39
    return MaybeFallThrough;
499
493
  // This says AlwaysFallThrough for calls to functions that are not marked
500
493
  // noreturn, that don't return.  If people would like this warning to be more
501
493
  // accurate, such functions should be marked as noreturn.
502
493
  return AlwaysFallThrough;
503
493
}
504
505
namespace {
506
507
struct CheckFallThroughDiagnostics {
508
  unsigned diag_MaybeFallThrough_HasNoReturn;
509
  unsigned diag_MaybeFallThrough_ReturnsNonVoid;
510
  unsigned diag_AlwaysFallThrough_HasNoReturn;
511
  unsigned diag_AlwaysFallThrough_ReturnsNonVoid;
512
  unsigned diag_NeverFallThroughOrReturn;
513
  enum { Function, Block, Lambda, Coroutine } funMode;
514
  SourceLocation FuncLoc;
515
516
429k
  static CheckFallThroughDiagnostics MakeForFunction(const Decl *Func) {
517
429k
    CheckFallThroughDiagnostics D;
518
429k
    D.FuncLoc = Func->getLocation();
519
429k
    D.diag_MaybeFallThrough_HasNoReturn =
520
429k
      diag::warn_falloff_noreturn_function;
521
429k
    D.diag_MaybeFallThrough_ReturnsNonVoid =
522
429k
      diag::warn_maybe_falloff_nonvoid_function;
523
429k
    D.diag_AlwaysFallThrough_HasNoReturn =
524
429k
      diag::warn_falloff_noreturn_function;
525
429k
    D.diag_AlwaysFallThrough_ReturnsNonVoid =
526
429k
      diag::warn_falloff_nonvoid_function;
527
429k
528
429k
    // Don't suggest that virtual functions be marked "noreturn", since they
529
429k
    // might be overridden by non-noreturn functions.
530
429k
    bool isVirtualMethod = false;
531
429k
    if (const CXXMethodDecl *Method = dyn_cast<CXXMethodDecl>(Func))
532
217k
      isVirtualMethod = Method->isVirtual();
533
429k
534
429k
    // Don't suggest that template instantiations be marked "noreturn"
535
429k
    bool isTemplateInstantiation = false;
536
429k
    if (const FunctionDecl *Function = dyn_cast<FunctionDecl>(Func))
537
425k
      isTemplateInstantiation = Function->isTemplateInstantiation();
538
429k
539
429k
    if (!isVirtualMethod && 
!isTemplateInstantiation417k
)
540
326k
      D.diag_NeverFallThroughOrReturn =
541
326k
        diag::warn_suggest_noreturn_function;
542
103k
    else
543
103k
      D.diag_NeverFallThroughOrReturn = 0;
544
429k
545
429k
    D.funMode = Function;
546
429k
    return D;
547
429k
  }
548
549
65
  static CheckFallThroughDiagnostics MakeForCoroutine(const Decl *Func) {
550
65
    CheckFallThroughDiagnostics D;
551
65
    D.FuncLoc = Func->getLocation();
552
65
    D.diag_MaybeFallThrough_HasNoReturn = 0;
553
65
    D.diag_MaybeFallThrough_ReturnsNonVoid =
554
65
        diag::warn_maybe_falloff_nonvoid_coroutine;
555
65
    D.diag_AlwaysFallThrough_HasNoReturn = 0;
556
65
    D.diag_AlwaysFallThrough_ReturnsNonVoid =
557
65
        diag::warn_falloff_nonvoid_coroutine;
558
65
    D.funMode = Coroutine;
559
65
    return D;
560
65
  }
561
562
2.11k
  static CheckFallThroughDiagnostics MakeForBlock() {
563
2.11k
    CheckFallThroughDiagnostics D;
564
2.11k
    D.diag_MaybeFallThrough_HasNoReturn =
565
2.11k
      diag::err_noreturn_block_has_return_expr;
566
2.11k
    D.diag_MaybeFallThrough_ReturnsNonVoid =
567
2.11k
      diag::err_maybe_falloff_nonvoid_block;
568
2.11k
    D.diag_AlwaysFallThrough_HasNoReturn =
569
2.11k
      diag::err_noreturn_block_has_return_expr;
570
2.11k
    D.diag_AlwaysFallThrough_ReturnsNonVoid =
571
2.11k
      diag::err_falloff_nonvoid_block;
572
2.11k
    D.diag_NeverFallThroughOrReturn = 0;
573
2.11k
    D.funMode = Block;
574
2.11k
    return D;
575
2.11k
  }
576
577
2.61k
  static CheckFallThroughDiagnostics MakeForLambda() {
578
2.61k
    CheckFallThroughDiagnostics D;
579
2.61k
    D.diag_MaybeFallThrough_HasNoReturn =
580
2.61k
      diag::err_noreturn_lambda_has_return_expr;
581
2.61k
    D.diag_MaybeFallThrough_ReturnsNonVoid =
582
2.61k
      diag::warn_maybe_falloff_nonvoid_lambda;
583
2.61k
    D.diag_AlwaysFallThrough_HasNoReturn =
584
2.61k
      diag::err_noreturn_lambda_has_return_expr;
585
2.61k
    D.diag_AlwaysFallThrough_ReturnsNonVoid =
586
2.61k
      diag::warn_falloff_nonvoid_lambda;
587
2.61k
    D.diag_NeverFallThroughOrReturn = 0;
588
2.61k
    D.funMode = Lambda;
589
2.61k
    return D;
590
2.61k
  }
591
592
  bool checkDiagnostics(DiagnosticsEngine &D, bool ReturnsVoid,
593
434k
                        bool HasNoReturn) const {
594
434k
    if (funMode == Function) {
595
429k
      return (ReturnsVoid ||
596
429k
              D.isIgnored(diag::warn_maybe_falloff_nonvoid_function,
597
218k
                          FuncLoc)) &&
598
429k
             
(211k
!HasNoReturn211k
||
599
211k
              D.isIgnored(diag::warn_noreturn_function_has_return_expr,
600
546
                          FuncLoc)) &&
601
429k
             
(211k
!ReturnsVoid211k
||
602
211k
              
D.isIgnored(diag::warn_suggest_noreturn_block, FuncLoc)210k
);
603
429k
    }
604
4.79k
    if (funMode == Coroutine) {
605
65
      return (ReturnsVoid ||
606
65
              
D.isIgnored(diag::warn_maybe_falloff_nonvoid_function, FuncLoc)15
||
607
65
              D.isIgnored(diag::warn_maybe_falloff_nonvoid_coroutine,
608
15
                          FuncLoc)) &&
609
65
             
(!HasNoReturn)50
;
610
65
    }
611
4.72k
    // For blocks / lambdas.
612
4.72k
    return ReturnsVoid && 
!HasNoReturn3.69k
;
613
4.72k
  }
614
};
615
616
} // anonymous namespace
617
618
/// CheckFallThroughForBody - Check that we don't fall off the end of a
619
/// function that should return a value.  Check that we don't fall off the end
620
/// of a noreturn function.  We assume that functions and blocks not marked
621
/// noreturn will return.
622
static void CheckFallThroughForBody(Sema &S, const Decl *D, const Stmt *Body,
623
                                    QualType BlockType,
624
                                    const CheckFallThroughDiagnostics &CD,
625
                                    AnalysisDeclContext &AC,
626
434k
                                    sema::FunctionScopeInfo *FSI) {
627
434k
628
434k
  bool ReturnsVoid = false;
629
434k
  bool HasNoReturn = false;
630
434k
  bool IsCoroutine = FSI->isCoroutine();
631
434k
632
434k
  if (const auto *FD = dyn_cast<FunctionDecl>(D)) {
633
427k
    if (const auto *CBody = dyn_cast<CoroutineBodyStmt>(Body))
634
66
      ReturnsVoid = CBody->getFallthroughHandler() != nullptr;
635
427k
    else
636
427k
      ReturnsVoid = FD->getReturnType()->isVoidType();
637
427k
    HasNoReturn = FD->isNoReturn();
638
427k
  }
639
6.52k
  else if (const auto *MD = dyn_cast<ObjCMethodDecl>(D)) {
640
4.41k
    ReturnsVoid = MD->getReturnType()->isVoidType();
641
4.41k
    HasNoReturn = MD->hasAttr<NoReturnAttr>();
642
4.41k
  }
643
2.11k
  else if (isa<BlockDecl>(D)) {
644
2.11k
    if (const FunctionType *FT =
645
2.11k
          BlockType->getPointeeType()->getAs<FunctionType>()) {
646
2.11k
      if (FT->getReturnType()->isVoidType())
647
1.75k
        ReturnsVoid = true;
648
2.11k
      if (FT->getNoReturnAttr())
649
1
        HasNoReturn = true;
650
2.11k
    }
651
2.11k
  }
652
434k
653
434k
  DiagnosticsEngine &Diags = S.getDiagnostics();
654
434k
655
434k
  // Short circuit for compilation speed.
656
434k
  if (CD.checkDiagnostics(Diags, ReturnsVoid, HasNoReturn))
657
214k
      return;
658
219k
  SourceLocation LBrace = Body->getBeginLoc(), RBrace = Body->getEndLoc();
659
219k
  auto EmitDiag = [&](SourceLocation Loc, unsigned DiagID) {
660
485
    if (IsCoroutine)
661
6
      S.Diag(Loc, DiagID) << FSI->CoroutinePromise->getType();
662
479
    else
663
479
      S.Diag(Loc, DiagID);
664
485
  };
665
219k
666
219k
  // cpu_dispatch functions permit empty function bodies for ICC compatibility.
667
219k
  if (D->getAsFunction() && 
D->getAsFunction()->isCPUDispatchMultiVersion()216k
)
668
0
    return;
669
219k
670
219k
  // Either in a function body compound statement, or a function-try-block.
671
219k
  switch (CheckFallThrough(AC)) {
672
219k
    case UnknownFallThrough:
673
36
      break;
674
219k
675
219k
    case MaybeFallThrough:
676
39
      if (HasNoReturn)
677
0
        EmitDiag(RBrace, CD.diag_MaybeFallThrough_HasNoReturn);
678
39
      else if (!ReturnsVoid)
679
38
        EmitDiag(RBrace, CD.diag_MaybeFallThrough_ReturnsNonVoid);
680
39
      break;
681
219k
    case AlwaysFallThrough:
682
493
      if (HasNoReturn)
683
21
        EmitDiag(RBrace, CD.diag_AlwaysFallThrough_HasNoReturn);
684
472
      else if (!ReturnsVoid)
685
426
        EmitDiag(RBrace, CD.diag_AlwaysFallThrough_ReturnsNonVoid);
686
493
      break;
687
219k
    case NeverFallThroughOrReturn:
688
1.50k
      if (ReturnsVoid && 
!HasNoReturn533
&&
CD.diag_NeverFallThroughOrReturn9
) {
689
4
        if (const FunctionDecl *FD = dyn_cast<FunctionDecl>(D)) {
690
3
          S.Diag(LBrace, CD.diag_NeverFallThroughOrReturn) << 0 << FD;
691
3
        } else 
if (const ObjCMethodDecl *1
MD1
= dyn_cast<ObjCMethodDecl>(D)) {
692
1
          S.Diag(LBrace, CD.diag_NeverFallThroughOrReturn) << 1 << MD;
693
1
        } else {
694
0
          S.Diag(LBrace, CD.diag_NeverFallThroughOrReturn);
695
0
        }
696
4
      }
697
1.50k
      break;
698
219k
    case NeverFallThrough:
699
217k
      break;
700
219k
  }
701
219k
}
702
703
//===----------------------------------------------------------------------===//
704
// -Wuninitialized
705
//===----------------------------------------------------------------------===//
706
707
namespace {
708
/// ContainsReference - A visitor class to search for references to
709
/// a particular declaration (the needle) within any evaluated component of an
710
/// expression (recursively).
711
class ContainsReference : public ConstEvaluatedExprVisitor<ContainsReference> {
712
  bool FoundReference;
713
  const DeclRefExpr *Needle;
714
715
public:
716
  typedef ConstEvaluatedExprVisitor<ContainsReference> Inherited;
717
718
  ContainsReference(ASTContext &Context, const DeclRefExpr *Needle)
719
62
    : Inherited(Context), FoundReference(false), Needle(Needle) {}
720
721
274
  void VisitExpr(const Expr *E) {
722
274
    // Stop evaluating if we already have a reference.
723
274
    if (FoundReference)
724
17
      return;
725
257
726
257
    Inherited::VisitExpr(E);
727
257
  }
728
729
118
  void VisitDeclRefExpr(const DeclRefExpr *E) {
730
118
    if (E == Needle)
731
60
      FoundReference = true;
732
58
    else
733
58
      Inherited::VisitDeclRefExpr(E);
734
118
  }
735
736
62
  bool doesContainReference() const { return FoundReference; }
737
};
738
} // anonymous namespace
739
740
408
/// Emit a note suggesting an initialization fix-it for VD, returning true if
/// a note was produced.  No fix-it is offered when the variable already has
/// an initializer, ends inside a macro, or has no obvious zero initializer.
static bool SuggestInitializationFixit(Sema &S, const VarDecl *VD) {
  QualType VariableTy = VD->getType().getCanonicalType();

  // Block pointers get a different suggestion: mark the variable __block.
  if (VariableTy->isBlockPointerType() && !VD->hasAttr<BlocksAttr>()) {
    S.Diag(VD->getLocation(), diag::note_block_var_fixit_add_initialization)
        << VD->getDeclName()
        << FixItHint::CreateInsertion(VD->getLocation(), "__block ");
    return true;
  }

  // Don't issue a fixit if there is already an initializer.
  if (VD->getInit())
    return false;

  // Don't suggest a fixit inside macros.
  if (VD->getEndLoc().isMacroID())
    return false;

  SourceLocation InsertLoc = S.getLocForEndOfToken(VD->getEndLoc());

  // Suggest possible initialization (if any).
  std::string Init = S.getFixItZeroInitializerForType(VariableTy, InsertLoc);
  if (Init.empty())
    return false;

  S.Diag(InsertLoc, diag::note_var_fixit_add_initialization)
      << VD->getDeclName() << FixItHint::CreateInsertion(InsertLoc, Init);
  return true;
}
769
770
/// Create a fixit to remove an if-like statement, on the assumption that its
771
/// condition is CondVal.
772
static void CreateIfFixit(Sema &S, const Stmt *If, const Stmt *Then,
773
                          const Stmt *Else, bool CondVal,
774
23
                          FixItHint &Fixit1, FixItHint &Fixit2) {
775
23
  if (CondVal) {
776
12
    // If condition is always true, remove all but the 'then'.
777
12
    Fixit1 = FixItHint::CreateRemoval(
778
12
        CharSourceRange::getCharRange(If->getBeginLoc(), Then->getBeginLoc()));
779
12
    if (Else) {
780
2
      SourceLocation ElseKwLoc = S.getLocForEndOfToken(Then->getEndLoc());
781
2
      Fixit2 =
782
2
          FixItHint::CreateRemoval(SourceRange(ElseKwLoc, Else->getEndLoc()));
783
2
    }
784
12
  } else {
785
11
    // If condition is always false, remove all but the 'else'.
786
11
    if (Else)
787
11
      Fixit1 = FixItHint::CreateRemoval(CharSourceRange::getCharRange(
788
11
          If->getBeginLoc(), Else->getBeginLoc()));
789
0
    else
790
0
      Fixit1 = FixItHint::CreateRemoval(If->getSourceRange());
791
11
  }
792
23
}
793
794
/// DiagUninitUse -- Helper function to produce a diagnostic for an
/// uninitialized use of a variable.
///
/// \param VD the variable that is (possibly) used uninitialized.
/// \param Use the use site and its kind (always / sometimes / maybe).
/// \param IsCapturedByBlock streamed into each diagnostic to adjust its
///        wording for variables captured by a block.
static void DiagUninitUse(Sema &S, const VarDecl *VD, const UninitUse &Use,
                          bool IsCapturedByBlock) {
  bool Diagnosed = false;

  switch (Use.getKind()) {
  case UninitUse::Always:
    // Definite use: a single unconditional warning at the use site.
    S.Diag(Use.getUser()->getBeginLoc(), diag::warn_uninit_var)
        << VD->getDeclName() << IsCapturedByBlock
        << Use.getUser()->getSourceRange();
    return;

  case UninitUse::AfterDecl:
  case UninitUse::AfterCall:
    // Index into the diagnostic's %select: AfterDecl -> 4, AfterCall -> 5.
    S.Diag(VD->getLocation(), diag::warn_sometimes_uninit_var)
      << VD->getDeclName() << IsCapturedByBlock
      << (Use.getKind() == UninitUse::AfterDecl ? 4 : 5)
      << const_cast<DeclContext*>(VD->getLexicalDeclContext())
      << VD->getSourceRange();
    S.Diag(Use.getUser()->getBeginLoc(), diag::note_uninit_var_use)
        << IsCapturedByBlock << Use.getUser()->getSourceRange();
    return;

  case UninitUse::Maybe:
  case UninitUse::Sometimes:
    // Carry on to report sometimes-uninitialized branches, if possible,
    // or a 'may be used uninitialized' diagnostic otherwise.
    break;
  }

  // Diagnose each branch which leads to a sometimes-uninitialized use.
  for (UninitUse::branch_iterator I = Use.branch_begin(), E = Use.branch_end();
       I != E; ++I) {
    assert(Use.getKind() == UninitUse::Sometimes);

    const Expr *User = Use.getUser();
    const Stmt *Term = I->Terminator;

    // Information used when building the diagnostic.
    unsigned DiagKind;
    StringRef Str;
    SourceRange Range;

    // FixIts to suppress the diagnostic by removing the dead condition.
    // For all binary terminators, branch 0 is taken if the condition is true,
    // and branch 1 is taken if the condition is false.
    int RemoveDiagKind = -1;
    const char *FixitStr =
        S.getLangOpts().CPlusPlus ? (I->Output ? "true" : "false")
                                  : (I->Output ? "1" : "0");
    FixItHint Fixit1, Fixit2;

    switch (Term ? Term->getStmtClass() : Stmt::DeclStmtClass) {
    default:
      // Don't know how to report this. Just fall back to 'may be used
      // uninitialized'. FIXME: Can this happen?
      continue;

    // "condition is true / condition is false".
    case Stmt::IfStmtClass: {
      const IfStmt *IS = cast<IfStmt>(Term);
      DiagKind = 0;
      Str = "if";
      Range = IS->getCond()->getSourceRange();
      RemoveDiagKind = 0;
      CreateIfFixit(S, IS, IS->getThen(), IS->getElse(),
                    I->Output, Fixit1, Fixit2);
      break;
    }
    case Stmt::ConditionalOperatorClass: {
      const ConditionalOperator *CO = cast<ConditionalOperator>(Term);
      DiagKind = 0;
      Str = "?:";
      Range = CO->getCond()->getSourceRange();
      RemoveDiagKind = 0;
      CreateIfFixit(S, CO, CO->getTrueExpr(), CO->getFalseExpr(),
                    I->Output, Fixit1, Fixit2);
      break;
    }
    case Stmt::BinaryOperatorClass: {
      const BinaryOperator *BO = cast<BinaryOperator>(Term);
      // Only short-circuiting && / || create interesting branches here.
      if (!BO->isLogicalOp())
        continue;
      DiagKind = 0;
      Str = BO->getOpcodeStr();
      Range = BO->getLHS()->getSourceRange();
      RemoveDiagKind = 0;
      if ((BO->getOpcode() == BO_LAnd && I->Output) ||
          (BO->getOpcode() == BO_LOr && !I->Output))
        // true && y -> y, false || y -> y.
        Fixit1 = FixItHint::CreateRemoval(
            SourceRange(BO->getBeginLoc(), BO->getOperatorLoc()));
      else
        // false && y -> false, true || y -> true.
        Fixit1 = FixItHint::CreateReplacement(BO->getSourceRange(), FixitStr);
      break;
    }

    // "loop is entered / loop is exited".
    case Stmt::WhileStmtClass:
      DiagKind = 1;
      Str = "while";
      Range = cast<WhileStmt>(Term)->getCond()->getSourceRange();
      RemoveDiagKind = 1;
      Fixit1 = FixItHint::CreateReplacement(Range, FixitStr);
      break;
    case Stmt::ForStmtClass:
      DiagKind = 1;
      Str = "for";
      Range = cast<ForStmt>(Term)->getCond()->getSourceRange();
      RemoveDiagKind = 1;
      if (I->Output)
        // 'for (;;)' always enters the loop: removing the condition models
        // the always-true branch.
        Fixit1 = FixItHint::CreateRemoval(Range);
      else
        Fixit1 = FixItHint::CreateReplacement(Range, FixitStr);
      break;
    case Stmt::CXXForRangeStmtClass:
      if (I->Output == 1) {
        // The use occurs if a range-based for loop's body never executes.
        // That may be impossible, and there's no syntactic fix for this,
        // so treat it as a 'may be uninitialized' case.
        continue;
      }
      DiagKind = 1;
      Str = "for";
      Range = cast<CXXForRangeStmt>(Term)->getRangeInit()->getSourceRange();
      break;

    // "condition is true / loop is exited".
    case Stmt::DoStmtClass:
      DiagKind = 2;
      Str = "do";
      Range = cast<DoStmt>(Term)->getCond()->getSourceRange();
      RemoveDiagKind = 1;
      Fixit1 = FixItHint::CreateReplacement(Range, FixitStr);
      break;

    // "switch case is taken".
    case Stmt::CaseStmtClass:
      DiagKind = 3;
      Str = "case";
      Range = cast<CaseStmt>(Term)->getLHS()->getSourceRange();
      break;
    case Stmt::DefaultStmtClass:
      DiagKind = 3;
      Str = "default";
      Range = cast<DefaultStmt>(Term)->getDefaultLoc();
      break;
    }

    // Emit the branch-specific warning, the use-site note, and (when we
    // built a fixit above) a note showing how to remove the dead condition.
    S.Diag(Range.getBegin(), diag::warn_sometimes_uninit_var)
      << VD->getDeclName() << IsCapturedByBlock << DiagKind
      << Str << I->Output << Range;
    S.Diag(User->getBeginLoc(), diag::note_uninit_var_use)
        << IsCapturedByBlock << User->getSourceRange();
    if (RemoveDiagKind != -1)
      S.Diag(Fixit1.RemoveRange.getBegin(), diag::note_uninit_fixit_remove_cond)
        << RemoveDiagKind << Str << I->Output << Fixit1 << Fixit2;

    Diagnosed = true;
  }

  // No branch produced a specific diagnostic: fall back to the generic
  // 'may be used uninitialized' warning.
  if (!Diagnosed)
    S.Diag(Use.getUser()->getBeginLoc(), diag::warn_maybe_uninit_var)
        << VD->getDeclName() << IsCapturedByBlock
        << Use.getUser()->getSourceRange();
}
962
963
/// DiagnoseUninitializedUse -- Helper function for diagnosing uses of an
/// uninitialized variable. This manages the different forms of diagnostic
/// emitted for particular types of uses. Returns true if the use was diagnosed
/// as a warning. If a particular use is one we omit warnings for, returns
/// false.
///
/// \param alwaysReportSelfInit when true, the 'int x = x;' GCC idiom is not
///        exempted from the self-reference diagnostic.
static bool DiagnoseUninitializedUse(Sema &S, const VarDecl *VD,
                                     const UninitUse &Use,
                                     bool alwaysReportSelfInit = false) {
  if (const DeclRefExpr *DRE = dyn_cast<DeclRefExpr>(Use.getUser())) {
    // Inspect the initializer of the variable declaration which is
    // being referenced prior to its initialization. We emit
    // specialized diagnostics for self-initialization, and we
    // specifically avoid warning about self references which take the
    // form of:
    //
    //   int x = x;
    //
    // This is used to indicate to GCC that 'x' is intentionally left
    // uninitialized. Proven code paths which access 'x' in
    // an uninitialized state after this will still warn.
    if (const Expr *Initializer = VD->getInit()) {
      if (!alwaysReportSelfInit && DRE == Initializer->IgnoreParenImpCasts())
        return false;

      // The reference is buried somewhere inside the initializer (e.g.
      // 'int x = x + 1;'): report it as a self-reference-in-init.
      ContainsReference CR(S.Context, DRE);
      CR.Visit(Initializer);
      if (CR.doesContainReference()) {
        S.Diag(DRE->getBeginLoc(), diag::warn_uninit_self_reference_in_init)
            << VD->getDeclName() << VD->getLocation() << DRE->getSourceRange();
        return true;
      }
    }

    DiagUninitUse(S, VD, Use, false);
  } else {
    // Not a direct reference: the cast asserts the only other user kind
    // produced by the analysis is a block capture.
    const BlockExpr *BE = cast<BlockExpr>(Use.getUser());
    if (VD->getType()->isBlockPointerType() && !VD->hasAttr<BlocksAttr>())
      S.Diag(BE->getBeginLoc(),
             diag::warn_uninit_byref_blockvar_captured_by_block)
          << VD->getDeclName()
          << VD->getType().getQualifiers().hasObjCLifetime();
    else
      DiagUninitUse(S, VD, Use, true);
  }

  // Report where the variable was declared when the use wasn't within
  // the initializer of that declaration & we didn't already suggest
  // an initialization fixit.
  if (!SuggestInitializationFixit(S, VD))
    S.Diag(VD->getBeginLoc(), diag::note_var_declared_here)
        << VD->getDeclName();

  return true;
}
1017
1018
namespace {
  /// Records [[fallthrough]]-style annotations found in a function body and,
  /// given the function's CFG, determines which switch labels are fallen
  /// into without an annotation (and which annotations were never matched).
  class FallthroughMapper : public RecursiveASTVisitor<FallthroughMapper> {
  public:
    FallthroughMapper(Sema &S)
      : FoundSwitchStatements(false),
        S(S) {
    }

    /// True if the traversed body contained at least one switch statement.
    bool foundSwitchStatements() const { return FoundSwitchStatements; }

    /// Remove an annotation from the pending set once it has been matched
    /// to a fall-through edge. Anything that remains afterwards is reported
    /// as misplaced by the caller.
    void markFallthroughVisited(const AttributedStmt *Stmt) {
      bool Found = FallthroughStmts.erase(Stmt);
      assert(Found);
      (void)Found;
    }

    typedef llvm::SmallPtrSet<const AttributedStmt*, 8> AttrStmts;

    /// Annotations collected during traversal that have not (yet) been
    /// matched to a fall-through edge.
    const AttrStmts &getFallthroughStmts() const {
      return FallthroughStmts;
    }

    /// BFS over the CFG from the entry block, recording every reachable
    /// block in ReachableBlocks. Must be called before
    /// checkFallThroughIntoBlock.
    void fillReachableBlocks(CFG *Cfg) {
      assert(ReachableBlocks.empty() && "ReachableBlocks already filled");
      std::deque<const CFGBlock *> BlockQueue;

      ReachableBlocks.insert(&Cfg->getEntry());
      BlockQueue.push_back(&Cfg->getEntry());
      // Mark all case blocks reachable to avoid problems with switching on
      // constants, covered enums, etc.
      // These blocks can contain fall-through annotations, and we don't want to
      // issue a warn_fallthrough_attr_unreachable for them.
      for (const auto *B : *Cfg) {
        const Stmt *L = B->getLabel();
        if (L && isa<SwitchCase>(L) && ReachableBlocks.insert(B).second)
          BlockQueue.push_back(B);
      }

      while (!BlockQueue.empty()) {
        const CFGBlock *P = BlockQueue.front();
        BlockQueue.pop_front();
        for (CFGBlock::const_succ_iterator I = P->succ_begin(),
                                           E = P->succ_end();
             I != E; ++I) {
          if (*I && ReachableBlocks.insert(*I).second)
            BlockQueue.push_back(*I);
        }
      }
    }

    /// Examine every predecessor of the case-labeled block \p B and decide
    /// whether control can fall into it without an annotation. Returns true
    /// iff at least one unannotated fall-through edge was found;
    /// \p AnnotatedCnt is set to the number of annotated edges.
    bool checkFallThroughIntoBlock(const CFGBlock &B, int &AnnotatedCnt,
                                   bool IsTemplateInstantiation) {
      assert(!ReachableBlocks.empty() && "ReachableBlocks empty");

      int UnannotatedCnt = 0;
      AnnotatedCnt = 0;

      // Worklist of predecessors still to classify.
      std::deque<const CFGBlock*> BlockQueue(B.pred_begin(), B.pred_end());
      while (!BlockQueue.empty()) {
        const CFGBlock *P = BlockQueue.front();
        BlockQueue.pop_front();
        if (!P) continue;

        const Stmt *Term = P->getTerminatorStmt();
        if (Term && isa<SwitchStmt>(Term))
          continue; // Switch statement, good.

        const SwitchCase *SW = dyn_cast_or_null<SwitchCase>(P->getLabel());
        if (SW && SW->getSubStmt() == B.getLabel() && P->begin() == P->end())
          continue; // Previous case label has no statements, good.

        const LabelStmt *L = dyn_cast_or_null<LabelStmt>(P->getLabel());
        if (L && L->getSubStmt() == B.getLabel() && P->begin() == P->end())
          continue; // Case label is preceded with a normal label, good.

        if (!ReachableBlocks.count(P)) {
          // Unreachable predecessor: scan it backwards for an annotation so
          // we can still diagnose/consume it.
          for (CFGBlock::const_reverse_iterator ElemIt = P->rbegin(),
                                                ElemEnd = P->rend();
               ElemIt != ElemEnd; ++ElemIt) {
            if (Optional<CFGStmt> CS = ElemIt->getAs<CFGStmt>()) {
              if (const AttributedStmt *AS = asFallThroughAttr(CS->getStmt())) {
                // Don't issue a warning for an unreachable fallthrough
                // attribute in template instantiations as it may not be
                // unreachable in all instantiations of the template.
                if (!IsTemplateInstantiation)
                  S.Diag(AS->getBeginLoc(),
                         diag::warn_fallthrough_attr_unreachable);
                markFallthroughVisited(AS);
                ++AnnotatedCnt;
                break;
              }
              // Don't care about other unreachable statements.
            }
          }
          // If there are no unreachable statements, this may be a special
          // case in CFG:
          // case X: {
          //    A a;  // A has a destructor.
          //    break;
          // }
          // // <<<< This place is represented by a 'hanging' CFG block.
          // case Y:
          continue;
        }

        const Stmt *LastStmt = getLastStmt(*P);
        if (const AttributedStmt *AS = asFallThroughAttr(LastStmt)) {
          markFallthroughVisited(AS);
          ++AnnotatedCnt;
          continue; // Fallthrough annotation, good.
        }

        if (!LastStmt) { // This block contains no executable statements.
          // Traverse its predecessors.
          std::copy(P->pred_begin(), P->pred_end(),
                    std::back_inserter(BlockQueue));
          continue;
        }

        ++UnannotatedCnt;
      }
      return !!UnannotatedCnt;
    }

    // RecursiveASTVisitor setup.
    bool shouldWalkTypesOfTypeLocs() const { return false; }

    // Collect every fallthrough-attributed statement in the body.
    bool VisitAttributedStmt(AttributedStmt *S) {
      if (asFallThroughAttr(S))
        FallthroughStmts.insert(S);
      return true;
    }

    bool VisitSwitchStmt(SwitchStmt *S) {
      FoundSwitchStatements = true;
      return true;
    }

    // We don't want to traverse local type declarations. We analyze their
    // methods separately.
    bool TraverseDecl(Decl *D) { return true; }

    // We analyze lambda bodies separately. Skip them here.
    bool TraverseLambdaExpr(LambdaExpr *LE) {
      // Traverse the captures, but not the body.
      for (const auto &C : zip(LE->captures(), LE->capture_inits()))
        TraverseLambdaCapture(LE, &std::get<0>(C), std::get<1>(C));
      return true;
    }

  private:

    /// Return \p S as an AttributedStmt carrying a FallThroughAttr, or null.
    static const AttributedStmt *asFallThroughAttr(const Stmt *S) {
      if (const AttributedStmt *AS = dyn_cast_or_null<AttributedStmt>(S)) {
        if (hasSpecificAttr<FallThroughAttr>(AS->getAttrs()))
          return AS;
      }
      return nullptr;
    }

    /// Last executable statement of a block: its terminator if present,
    /// otherwise the last CFGStmt element, with a workaround for empty
    /// case labels dropped by CFGBuilder.
    static const Stmt *getLastStmt(const CFGBlock &B) {
      if (const Stmt *Term = B.getTerminatorStmt())
        return Term;
      for (CFGBlock::const_reverse_iterator ElemIt = B.rbegin(),
                                            ElemEnd = B.rend();
                                            ElemIt != ElemEnd; ++ElemIt) {
        if (Optional<CFGStmt> CS = ElemIt->getAs<CFGStmt>())
          return CS->getStmt();
      }
      // Workaround to detect a statement thrown out by CFGBuilder:
      //   case X: {} case Y:
      //   case X: ; case Y:
      if (const SwitchCase *SW = dyn_cast_or_null<SwitchCase>(B.getLabel()))
        if (!isa<SwitchCase>(SW->getSubStmt()))
          return SW->getSubStmt();

      return nullptr;
    }

    bool FoundSwitchStatements;   // Body contains a switch statement.
    AttrStmts FallthroughStmts;   // Annotations not yet matched to an edge.
    Sema &S;
    llvm::SmallPtrSet<const CFGBlock *, 16> ReachableBlocks;
  };
} // anonymous namespace
1203
1204
/// Pick the spelling used to suggest a fallthrough annotation at \p Loc:
/// a user macro expanding to the attribute when one is visible, otherwise
/// the raw [[clang::fallthrough]] / [[fallthrough]] attribute itself.
static StringRef getFallthroughAttrSpelling(Preprocessor &PP,
                                            SourceLocation Loc) {
  // Token sequences the two attribute spellings expand to; used to search
  // for a matching user macro.
  TokenValue StdTokens[] = {
      tok::l_square, tok::l_square,
      PP.getIdentifierInfo("fallthrough"),
      tok::r_square, tok::r_square
  };

  TokenValue ClangTokens[] = {
      tok::l_square, tok::l_square, PP.getIdentifierInfo("clang"),
      tok::coloncolon, PP.getIdentifierInfo("fallthrough"),
      tok::r_square, tok::r_square
  };

  // Outside C++17, prefer the clang:: spelling.
  const bool PreferClangAttr = !PP.getLangOpts().CPlusPlus17;

  StringRef Spelling;
  if (PreferClangAttr)
    Spelling = PP.getLastMacroWithSpelling(Loc, ClangTokens);
  if (Spelling.empty())
    Spelling = PP.getLastMacroWithSpelling(Loc, StdTokens);
  if (Spelling.empty() && !PreferClangAttr)
    Spelling = PP.getLastMacroWithSpelling(Loc, ClangTokens);
  // No suitable macro in scope: fall back to the attribute itself.
  if (Spelling.empty())
    Spelling = PreferClangAttr ? "[[clang::fallthrough]]" : "[[fallthrough]]";
  return Spelling;
}
1231
1232
/// Warn about switch labels that are fallen into without a fallthrough
/// annotation, and about annotations that are not placed at a case boundary.
/// \param PerFunction selects the per-function diagnostic variant, and limits
///        the analysis to functions that already contain annotations.
static void DiagnoseSwitchLabelsFallthrough(Sema &S, AnalysisDeclContext &AC,
                                            bool PerFunction) {
  // Only perform this analysis when using [[]] attributes. There is no good
  // workflow for this warning when not using C++11. There is no good way to
  // silence the warning (no attribute is available) unless we are using
  // [[]] attributes. One could use pragmas to silence the warning, but as a
  // general solution that is gross and not in the spirit of this warning.
  //
  // NOTE: This an intermediate solution. There are on-going discussions on
  // how to properly support this warning outside of C++11 with an annotation.
  if (!AC.getASTContext().getLangOpts().DoubleSquareBracketAttributes)
    return;

  FallthroughMapper FM(S);
  FM.TraverseStmt(AC.getBody());

  // Nothing to do without a switch in the body.
  if (!FM.foundSwitchStatements())
    return;

  // In per-function mode, only analyze functions that use the annotation.
  if (PerFunction && FM.getFallthroughStmts().empty())
    return;

  CFG *Cfg = AC.getCFG();

  if (!Cfg)
    return;

  FM.fillReachableBlocks(Cfg);

  for (const CFGBlock *B : llvm::reverse(*Cfg)) {
    const Stmt *Label = B->getLabel();

    // Only case/default-labeled blocks can be "fallen into".
    if (!Label || !isa<SwitchCase>(Label))
      continue;

    int AnnotatedCnt;

    bool IsTemplateInstantiation = false;
    if (const FunctionDecl *Function = dyn_cast<FunctionDecl>(AC.getDecl()))
      IsTemplateInstantiation = Function->isTemplateInstantiation();
    if (!FM.checkFallThroughIntoBlock(*B, AnnotatedCnt,
                                      IsTemplateInstantiation))
      continue;

    S.Diag(Label->getBeginLoc(),
           PerFunction ? diag::warn_unannotated_fallthrough_per_function
                       : diag::warn_unannotated_fallthrough);

    // Suggest fixits (insert the annotation, or a break) unless the label
    // comes from a macro expansion.
    if (!AnnotatedCnt) {
      SourceLocation L = Label->getBeginLoc();
      if (L.isMacroID())
        continue;
      if (S.getLangOpts().CPlusPlus11) {
        const Stmt *Term = B->getTerminatorStmt();
        // Skip empty cases.
        while (B->empty() && !Term && B->succ_size() == 1) {
          B = *B->succ_begin();
          Term = B->getTerminatorStmt();
        }
        // Don't suggest the annotation before a lone 'break;'.
        if (!(B->empty() && Term && isa<BreakStmt>(Term))) {
          Preprocessor &PP = S.getPreprocessor();
          StringRef AnnotationSpelling = getFallthroughAttrSpelling(PP, L);
          SmallString<64> TextToInsert(AnnotationSpelling);
          TextToInsert += "; ";
          S.Diag(L, diag::note_insert_fallthrough_fixit) <<
              AnnotationSpelling <<
              FixItHint::CreateInsertion(L, TextToInsert);
        }
      }
      S.Diag(L, diag::note_insert_break_fixit) <<
        FixItHint::CreateInsertion(L, "break; ");
    }
  }

  // Any annotation never matched to a fall-through edge during the CFG walk
  // is misplaced.
  for (const auto *F : FM.getFallthroughStmts())
    S.Diag(F->getBeginLoc(), diag::err_fallthrough_attr_invalid_placement);
}
1309
1310
static bool isInLoop(const ASTContext &Ctx, const ParentMap &PM,
1311
68
                     const Stmt *S) {
1312
68
  assert(S);
1313
68
1314
410
  do {
1315
410
    switch (S->getStmtClass()) {
1316
410
    case Stmt::ForStmtClass:
1317
18
    case Stmt::WhileStmtClass:
1318
18
    case Stmt::CXXForRangeStmtClass:
1319
18
    case Stmt::ObjCForCollectionStmtClass:
1320
18
      return true;
1321
18
    case Stmt::DoStmtClass: {
1322
4
      Expr::EvalResult Result;
1323
4
      if (!cast<DoStmt>(S)->getCond()->EvaluateAsInt(Result, Ctx))
1324
2
        return true;
1325
2
      return Result.Val.getInt().getBoolValue();
1326
2
    }
1327
388
    default:
1328
388
      break;
1329
388
    }
1330
388
  } while ((S = PM.getParent(S)));
1331
68
1332
68
  
return false46
;
1333
68
}
1334
1335
/// Warn about repeated reads of weak (ObjC ARC __weak) objects recorded in
/// \p CurFn: each such object is diagnosed at its first unsafe read, with the
/// other accesses shown as notes.
static void diagnoseRepeatedUseOfWeak(Sema &S,
                                      const sema::FunctionScopeInfo *CurFn,
                                      const Decl *D,
                                      const ParentMap &PM) {
  typedef sema::FunctionScopeInfo::WeakObjectProfileTy WeakObjectProfileTy;
  typedef sema::FunctionScopeInfo::WeakObjectUseMap WeakObjectUseMap;
  typedef sema::FunctionScopeInfo::WeakUseVector WeakUseVector;
  typedef std::pair<const Stmt *, WeakObjectUseMap::const_iterator>
  StmtUsesPair;

  ASTContext &Ctx = S.getASTContext();

  const WeakObjectUseMap &WeakMap = CurFn->getWeakObjectUses();

  // Extract all weak objects that are referenced more than once.
  SmallVector<StmtUsesPair, 8> UsesByStmt;
  for (WeakObjectUseMap::const_iterator I = WeakMap.begin(), E = WeakMap.end();
       I != E; ++I) {
    const WeakUseVector &Uses = I->second;

    // Find the first read of the weak object.
    WeakUseVector::const_iterator UI = Uses.begin(), UE = Uses.end();
    for ( ; UI != UE; ++UI) {
      if (UI->isUnsafe())
        break;
    }

    // If there were only writes to this object, don't warn.
    if (UI == UE)
      continue;

    // If there was only one read, followed by any number of writes, and the
    // read is not within a loop, don't warn. Additionally, don't warn in a
    // loop if the base object is a local variable -- local variables are often
    // changed in loops.
    if (UI == Uses.begin()) {
      WeakUseVector::const_iterator UI2 = UI;
      for (++UI2; UI2 != UE; ++UI2)
        if (UI2->isUnsafe())
          break;

      if (UI2 == UE) {
        // Exactly one unsafe read: only interesting if it repeats via a loop.
        if (!isInLoop(Ctx, PM, UI->getUseExpr()))
          continue;

        const WeakObjectProfileTy &Profile = I->first;
        if (!Profile.isExactProfile())
          continue;

        const NamedDecl *Base = Profile.getBase();
        if (!Base)
          Base = Profile.getProperty();
        assert(Base && "A profile always has a base or property.");

        if (const VarDecl *BaseVar = dyn_cast<VarDecl>(Base))
          if (BaseVar->hasLocalStorage() && !isa<ParmVarDecl>(Base))
            continue;
      }
    }

    UsesByStmt.push_back(StmtUsesPair(UI->getUseExpr(), I));
  }

  if (UsesByStmt.empty())
    return;

  // Sort by first use so that we emit the warnings in a deterministic order.
  SourceManager &SM = S.getSourceManager();
  llvm::sort(UsesByStmt,
             [&SM](const StmtUsesPair &LHS, const StmtUsesPair &RHS) {
               return SM.isBeforeInTranslationUnit(LHS.first->getBeginLoc(),
                                                   RHS.first->getBeginLoc());
             });

  // Classify the current code body for better warning text.
  // This enum should stay in sync with the cases in
  // warn_arc_repeated_use_of_weak and warn_arc_possible_repeated_use_of_weak.
  // FIXME: Should we use a common classification enum and the same set of
  // possibilities all throughout Sema?
  enum {
    Function,
    Method,
    Block,
    Lambda
  } FunctionKind;

  if (isa<sema::BlockScopeInfo>(CurFn))
    FunctionKind = Block;
  else if (isa<sema::LambdaScopeInfo>(CurFn))
    FunctionKind = Lambda;
  else if (isa<ObjCMethodDecl>(D))
    FunctionKind = Method;
  else
    FunctionKind = Function;

  // Iterate through the sorted problems and emit warnings for each.
  for (const auto &P : UsesByStmt) {
    const Stmt *FirstRead = P.first;
    const WeakObjectProfileTy &Key = P.second->first;
    const WeakUseVector &Uses = P.second->second;

    // For complicated expressions like 'a.b.c' and 'x.b.c', WeakObjectProfileTy
    // may not contain enough information to determine that these are different
    // properties. We can only be 100% sure of a repeated use in certain cases,
    // and we adjust the diagnostic kind accordingly so that the less certain
    // case can be turned off if it is too noisy.
    unsigned DiagKind;
    if (Key.isExactProfile())
      DiagKind = diag::warn_arc_repeated_use_of_weak;
    else
      DiagKind = diag::warn_arc_possible_repeated_use_of_weak;

    // Classify the weak object being accessed for better warning text.
    // This enum should stay in sync with the cases in
    // warn_arc_repeated_use_of_weak and warn_arc_possible_repeated_use_of_weak.
    enum {
      Variable,
      Property,
      ImplicitProperty,
      Ivar
    } ObjectKind;

    const NamedDecl *KeyProp = Key.getProperty();
    if (isa<VarDecl>(KeyProp))
      ObjectKind = Variable;
    else if (isa<ObjCPropertyDecl>(KeyProp))
      ObjectKind = Property;
    else if (isa<ObjCMethodDecl>(KeyProp))
      ObjectKind = ImplicitProperty;
    else if (isa<ObjCIvarDecl>(KeyProp))
      ObjectKind = Ivar;
    else
      llvm_unreachable("Unexpected weak object kind!");

    // Do not warn about IBOutlet weak property receivers being set to null
    // since they are typically only used from the main thread.
    if (const ObjCPropertyDecl *Prop = dyn_cast<ObjCPropertyDecl>(KeyProp))
      if (Prop->hasAttr<IBOutletAttr>())
        continue;

    // Show the first time the object was read.
    S.Diag(FirstRead->getBeginLoc(), DiagKind)
        << int(ObjectKind) << KeyProp << int(FunctionKind)
        << FirstRead->getSourceRange();

    // Print all the other accesses as notes.
    for (const auto &Use : Uses) {
      if (Use.getUseExpr() == FirstRead)
        continue;
      S.Diag(Use.getUseExpr()->getBeginLoc(),
             diag::note_arc_weak_also_accessed_here)
          << Use.getUseExpr()->getSourceRange();
    }
  }
}
1490
1491
namespace {
/// Collects uses of potentially-uninitialized variables reported by the
/// uninitialized-values analysis and, on destruction, emits them as
/// diagnostics in a deterministic (insertion/source) order.
class UninitValsDiagReporter : public UninitVariablesHandler {
  Sema &S;
  typedef SmallVector<UninitUse, 2> UsesVec;
  // Pointer to the recorded uses, plus one bit: "this variable has an
  // idiomatic self-init (e.g. 'int x = x;')".
  typedef llvm::PointerIntPair<UsesVec *, 1, bool> MappedType;
  // Prefer using MapVector to DenseMap, so that iteration order will be
  // the same as insertion order. This is needed to obtain a deterministic
  // order of diagnostics when calling flushDiagnostics().
  typedef llvm::MapVector<const VarDecl *, MappedType> UsesMap;
  UsesMap uses;

public:
  UninitValsDiagReporter(Sema &S) : S(S) {}
  // Emitting happens when the reporter goes out of scope.
  ~UninitValsDiagReporter() override { flushDiagnostics(); }

  /// Return (creating on first access) the use record for \p vd.
  MappedType &getUses(const VarDecl *vd) {
    MappedType &V = uses[vd];
    if (!V.getPointer())
      V.setPointer(new UsesVec());
    return V;
  }

  void handleUseOfUninitVariable(const VarDecl *vd,
                                 const UninitUse &use) override {
    getUses(vd).getPointer()->push_back(use);
  }

  void handleSelfInit(const VarDecl *vd) override {
    // Remember the self-init in the tag bit; uses may be added later.
    getUses(vd).setInt(true);
  }

  /// Emit at most one warning per variable, then release all buffered state.
  void flushDiagnostics() {
    for (const auto &P : uses) {
      const VarDecl *vd = P.first;
      const MappedType &V = P.second;

      UsesVec *vec = V.getPointer();
      bool hasSelfInit = V.getInt();

      // Specially handle the case where we have uses of an uninitialized
      // variable, but the root cause is an idiomatic self-init.  We want
      // to report the diagnostic at the self-init since that is the root cause.
      if (!vec->empty() && hasSelfInit && hasAlwaysUninitializedUse(vec))
        DiagnoseUninitializedUse(S, vd,
                                 UninitUse(vd->getInit()->IgnoreParenCasts(),
                                           /* isAlwaysUninit */ true),
                                 /* alwaysReportSelfInit */ true);
      else {
        // Sort the uses by their SourceLocations.  While not strictly
        // guaranteed to produce them in line/column order, this will provide
        // a stable ordering.
        llvm::sort(vec->begin(), vec->end(),
                   [](const UninitUse &a, const UninitUse &b) {
          // Prefer a more confident report over a less confident one.
          if (a.getKind() != b.getKind())
            return a.getKind() > b.getKind();
          return a.getUser()->getBeginLoc() < b.getUser()->getBeginLoc();
        });

        for (const auto &U : *vec) {
          // If we have self-init, downgrade all uses to 'may be uninitialized'.
          UninitUse Use = hasSelfInit ? UninitUse(U.getUser(), false) : U;

          if (DiagnoseUninitializedUse(S, vd, Use))
            // Skip further diagnostics for this variable. We try to warn only
            // on the first point at which a variable is used uninitialized.
            break;
        }
      }

      // Release the uses vector.
      delete vec;
    }

    uses.clear();
  }

private:
  /// True when any recorded use is definitely (not just possibly)
  /// uninitialized.
  static bool hasAlwaysUninitializedUse(const UsesVec* vec) {
    return std::any_of(vec->begin(), vec->end(), [](const UninitUse &U) {
      return U.getKind() == UninitUse::Always ||
             U.getKind() == UninitUse::AfterCall ||
             U.getKind() == UninitUse::AfterDecl;
    });
  }
};
} // anonymous namespace
1578
1579
namespace clang {
1580
namespace {
1581
typedef SmallVector<PartialDiagnosticAt, 1> OptionalNotes;
1582
typedef std::pair<PartialDiagnosticAt, OptionalNotes> DelayedDiag;
1583
typedef std::list<DelayedDiag> DiagList;
1584
1585
struct SortDiagBySourceLocation {
1586
  SourceManager &SM;
1587
2.23k
  SortDiagBySourceLocation(SourceManager &SM) : SM(SM) {}
1588
1589
2.20k
  bool operator()(const DelayedDiag &left, const DelayedDiag &right) {
1590
2.20k
    // Although this call will be slow, this is only called when outputting
1591
2.20k
    // multiple warnings.
1592
2.20k
    return SM.isBeforeInTranslationUnit(left.first.first, right.first.first);
1593
2.20k
  }
1594
};
1595
} // anonymous namespace
1596
} // namespace clang
1597
1598
//===----------------------------------------------------------------------===//
1599
// -Wthread-safety
1600
//===----------------------------------------------------------------------===//
1601
namespace clang {
namespace threadSafety {
namespace {
/// Buffers -Wthread-safety diagnostics produced during the analysis and
/// emits them sorted by source location, so output is deterministic.
class ThreadSafetyReporter : public clang::threadSafety::ThreadSafetyHandler {
  Sema &S;
  DiagList Warnings;
  // Fallback locations used when a handler receives an invalid location.
  SourceLocation FunLocation, FunEndLocation;

  // Function currently being analyzed; only set between enterFunction()
  // and leaveFunction().
  const FunctionDecl *CurrentFunction;
  bool Verbose;

  /// In verbose mode, a note pointing at the function being analyzed;
  /// otherwise no notes.
  OptionalNotes getNotes() const {
    if (Verbose && CurrentFunction) {
      PartialDiagnosticAt FNote(CurrentFunction->getBody()->getBeginLoc(),
                                S.PDiag(diag::note_thread_warning_in_fun)
                                    << CurrentFunction);
      return OptionalNotes(1, FNote);
    }
    return OptionalNotes();
  }

  /// \p Note plus, in verbose mode, the "in function" note.
  OptionalNotes getNotes(const PartialDiagnosticAt &Note) const {
    OptionalNotes ONS(1, Note);
    if (Verbose && CurrentFunction) {
      PartialDiagnosticAt FNote(CurrentFunction->getBody()->getBeginLoc(),
                                S.PDiag(diag::note_thread_warning_in_fun)
                                    << CurrentFunction);
      ONS.push_back(std::move(FNote));
    }
    return ONS;
  }

  /// Two explicit notes plus, in verbose mode, the "in function" note.
  OptionalNotes getNotes(const PartialDiagnosticAt &Note1,
                         const PartialDiagnosticAt &Note2) const {
    OptionalNotes ONS;
    ONS.push_back(Note1);
    ONS.push_back(Note2);
    if (Verbose && CurrentFunction) {
      PartialDiagnosticAt FNote(CurrentFunction->getBody()->getBeginLoc(),
                                S.PDiag(diag::note_thread_warning_in_fun)
                                    << CurrentFunction);
      ONS.push_back(std::move(FNote));
    }
    return ONS;
  }

  /// "Previously acquired here" note, if we know where the lock was taken.
  OptionalNotes makeLockedHereNote(SourceLocation LocLocked, StringRef Kind) {
    return LocLocked.isValid()
               ? getNotes(PartialDiagnosticAt(
                     LocLocked, S.PDiag(diag::note_locked_here) << Kind))
               : getNotes();
  }

 public:
  ThreadSafetyReporter(Sema &S, SourceLocation FL, SourceLocation FEL)
    : S(S), FunLocation(FL), FunEndLocation(FEL),
      CurrentFunction(nullptr), Verbose(false) {}

  void setVerbose(bool b) { Verbose = b; }

  /// Emit all buffered diagnostics in order of sourcelocation.
  /// We need to output diagnostics produced while iterating through
  /// the lockset in deterministic order, so this function orders diagnostics
  /// and outputs them.
  void emitDiagnostics() {
    Warnings.sort(SortDiagBySourceLocation(S.getSourceManager()));
    for (const auto &Diag : Warnings) {
      S.Diag(Diag.first.first, Diag.first.second);
      for (const auto &Note : Diag.second)
        S.Diag(Note.first, Note.second);
    }
  }

  void handleInvalidLockExp(StringRef Kind, SourceLocation Loc) override {
    PartialDiagnosticAt Warning(Loc, S.PDiag(diag::warn_cannot_resolve_lock)
                                         << Loc);
    Warnings.emplace_back(std::move(Warning), getNotes());
  }

  void handleUnmatchedUnlock(StringRef Kind, Name LockName,
                             SourceLocation Loc) override {
    // Fall back to the function location when the unlock site is unknown.
    if (Loc.isInvalid())
      Loc = FunLocation;
    PartialDiagnosticAt Warning(Loc, S.PDiag(diag::warn_unlock_but_no_lock)
                                         << Kind << LockName);
    Warnings.emplace_back(std::move(Warning), getNotes());
  }

  void handleIncorrectUnlockKind(StringRef Kind, Name LockName,
                                 LockKind Expected, LockKind Received,
                                 SourceLocation LocLocked,
                                 SourceLocation LocUnlock) override {
    if (LocUnlock.isInvalid())
      LocUnlock = FunLocation;
    PartialDiagnosticAt Warning(
        LocUnlock, S.PDiag(diag::warn_unlock_kind_mismatch)
                       << Kind << LockName << Received << Expected);
    Warnings.emplace_back(std::move(Warning),
                          makeLockedHereNote(LocLocked, Kind));
  }

  void handleDoubleLock(StringRef Kind, Name LockName, SourceLocation LocLocked,
                        SourceLocation LocDoubleLock) override {
    if (LocDoubleLock.isInvalid())
      LocDoubleLock = FunLocation;
    PartialDiagnosticAt Warning(LocDoubleLock, S.PDiag(diag::warn_double_lock)
                                                   << Kind << LockName);
    Warnings.emplace_back(std::move(Warning),
                          makeLockedHereNote(LocLocked, Kind));
  }

  void handleMutexHeldEndOfScope(StringRef Kind, Name LockName,
                                 SourceLocation LocLocked,
                                 SourceLocation LocEndOfScope,
                                 LockErrorKind LEK) override {
    // Map each end-of-scope error kind onto its dedicated warning.
    unsigned DiagID = 0;
    switch (LEK) {
      case LEK_LockedSomePredecessors:
        DiagID = diag::warn_lock_some_predecessors;
        break;
      case LEK_LockedSomeLoopIterations:
        DiagID = diag::warn_expecting_lock_held_on_loop;
        break;
      case LEK_LockedAtEndOfFunction:
        DiagID = diag::warn_no_unlock;
        break;
      case LEK_NotLockedAtEndOfFunction:
        DiagID = diag::warn_expecting_locked;
        break;
    }
    if (LocEndOfScope.isInvalid())
      LocEndOfScope = FunEndLocation;

    PartialDiagnosticAt Warning(LocEndOfScope, S.PDiag(DiagID) << Kind
                                                               << LockName);
    Warnings.emplace_back(std::move(Warning),
                          makeLockedHereNote(LocLocked, Kind));
  }

  void handleExclusiveAndShared(StringRef Kind, Name LockName,
                                SourceLocation Loc1,
                                SourceLocation Loc2) override {
    PartialDiagnosticAt Warning(Loc1,
                                S.PDiag(diag::warn_lock_exclusive_and_shared)
                                    << Kind << LockName);
    // Note points at the conflicting (other-mode) acquisition.
    PartialDiagnosticAt Note(Loc2, S.PDiag(diag::note_lock_exclusive_and_shared)
                                       << Kind << LockName);
    Warnings.emplace_back(std::move(Warning), getNotes(Note));
  }

  void handleNoMutexHeld(StringRef Kind, const NamedDecl *D,
                         ProtectedOperationKind POK, AccessKind AK,
                         SourceLocation Loc) override {
    assert((POK == POK_VarAccess || POK == POK_VarDereference) &&
           "Only works for variables");
    unsigned DiagID = POK == POK_VarAccess?
                        diag::warn_variable_requires_any_lock:
                        diag::warn_var_deref_requires_any_lock;
    PartialDiagnosticAt Warning(Loc, S.PDiag(DiagID)
      << D << getLockKindFromAccessKind(AK));
    Warnings.emplace_back(std::move(Warning), getNotes());
  }

  void handleMutexNotHeld(StringRef Kind, const NamedDecl *D,
                          ProtectedOperationKind POK, Name LockName,
                          LockKind LK, SourceLocation Loc,
                          Name *PossibleMatch) override {
    unsigned DiagID = 0;
    if (PossibleMatch) {
      // A near-miss capability was found; use the "precise" variants and
      // attach a note naming the near match.
      switch (POK) {
        case POK_VarAccess:
          DiagID = diag::warn_variable_requires_lock_precise;
          break;
        case POK_VarDereference:
          DiagID = diag::warn_var_deref_requires_lock_precise;
          break;
        case POK_FunctionCall:
          DiagID = diag::warn_fun_requires_lock_precise;
          break;
        case POK_PassByRef:
          DiagID = diag::warn_guarded_pass_by_reference;
          break;
        case POK_PtPassByRef:
          DiagID = diag::warn_pt_guarded_pass_by_reference;
          break;
      }
      PartialDiagnosticAt Warning(Loc, S.PDiag(DiagID) << Kind
                                                       << D
                                                       << LockName << LK);
      PartialDiagnosticAt Note(Loc, S.PDiag(diag::note_found_mutex_near_match)
                                        << *PossibleMatch);
      if (Verbose && POK == POK_VarAccess) {
        PartialDiagnosticAt VNote(D->getLocation(),
                                 S.PDiag(diag::note_guarded_by_declared_here)
                                     << D->getNameAsString());
        Warnings.emplace_back(std::move(Warning), getNotes(Note, VNote));
      } else
        Warnings.emplace_back(std::move(Warning), getNotes(Note));
    } else {
      switch (POK) {
        case POK_VarAccess:
          DiagID = diag::warn_variable_requires_lock;
          break;
        case POK_VarDereference:
          DiagID = diag::warn_var_deref_requires_lock;
          break;
        case POK_FunctionCall:
          DiagID = diag::warn_fun_requires_lock;
          break;
        case POK_PassByRef:
          DiagID = diag::warn_guarded_pass_by_reference;
          break;
        case POK_PtPassByRef:
          DiagID = diag::warn_pt_guarded_pass_by_reference;
          break;
      }
      PartialDiagnosticAt Warning(Loc, S.PDiag(DiagID) << Kind
                                                       << D
                                                       << LockName << LK);
      if (Verbose && POK == POK_VarAccess) {
        // Verbose mode also points at the guarded declaration.
        PartialDiagnosticAt Note(D->getLocation(),
                                 S.PDiag(diag::note_guarded_by_declared_here));
        Warnings.emplace_back(std::move(Warning), getNotes(Note));
      } else
        Warnings.emplace_back(std::move(Warning), getNotes());
    }
  }

  void handleNegativeNotHeld(StringRef Kind, Name LockName, Name Neg,
                             SourceLocation Loc) override {
    PartialDiagnosticAt Warning(Loc,
        S.PDiag(diag::warn_acquire_requires_negative_cap)
        << Kind << LockName << Neg);
    Warnings.emplace_back(std::move(Warning), getNotes());
  }

  void handleFunExcludesLock(StringRef Kind, Name FunName, Name LockName,
                             SourceLocation Loc) override {
    PartialDiagnosticAt Warning(Loc, S.PDiag(diag::warn_fun_excludes_mutex)
                                         << Kind << FunName << LockName);
    Warnings.emplace_back(std::move(Warning), getNotes());
  }

  void handleLockAcquiredBefore(StringRef Kind, Name L1Name, Name L2Name,
                                SourceLocation Loc) override {
    PartialDiagnosticAt Warning(Loc,
      S.PDiag(diag::warn_acquired_before) << Kind << L1Name << L2Name);
    Warnings.emplace_back(std::move(Warning), getNotes());
  }

  void handleBeforeAfterCycle(Name L1Name, SourceLocation Loc) override {
    PartialDiagnosticAt Warning(Loc,
      S.PDiag(diag::warn_acquired_before_after_cycle) << L1Name);
    Warnings.emplace_back(std::move(Warning), getNotes());
  }

  void enterFunction(const FunctionDecl* FD) override {
    CurrentFunction = FD;
  }

  void leaveFunction(const FunctionDecl* FD) override {
    CurrentFunction = nullptr;
  }
};
} // anonymous namespace
} // namespace threadSafety
} // namespace clang
1868
1869
//===----------------------------------------------------------------------===//
1870
// -Wconsumed
1871
//===----------------------------------------------------------------------===//
1872
1873
namespace clang {
1874
namespace consumed {
1875
namespace {
1876
class ConsumedWarningsHandler : public ConsumedWarningsHandlerBase {
1877
1878
  Sema &S;
1879
  DiagList Warnings;
1880
1881
public:
1882
1883
94
  ConsumedWarningsHandler(Sema &S) : S(S) {}
1884
1885
91
  void emitDiagnostics() override {
1886
91
    Warnings.sort(SortDiagBySourceLocation(S.getSourceManager()));
1887
109
    for (const auto &Diag : Warnings) {
1888
109
      S.Diag(Diag.first.first, Diag.first.second);
1889
109
      for (const auto &Note : Diag.second)
1890
0
        S.Diag(Note.first, Note.second);
1891
109
    }
1892
91
  }
1893
1894
  void warnLoopStateMismatch(SourceLocation Loc,
1895
2
                             StringRef VariableName) override {
1896
2
    PartialDiagnosticAt Warning(Loc, S.PDiag(diag::warn_loop_state_mismatch) <<
1897
2
      VariableName);
1898
2
1899
2
    Warnings.emplace_back(std::move(Warning), OptionalNotes());
1900
2
  }
1901
1902
  void warnParamReturnTypestateMismatch(SourceLocation Loc,
1903
                                        StringRef VariableName,
1904
                                        StringRef ExpectedState,
1905
2
                                        StringRef ObservedState) override {
1906
2
1907
2
    PartialDiagnosticAt Warning(Loc, S.PDiag(
1908
2
      diag::warn_param_return_typestate_mismatch) << VariableName <<
1909
2
        ExpectedState << ObservedState);
1910
2
1911
2
    Warnings.emplace_back(std::move(Warning), OptionalNotes());
1912
2
  }
1913
1914
  void warnParamTypestateMismatch(SourceLocation Loc, StringRef ExpectedState,
1915
6
                                  StringRef ObservedState) override {
1916
6
1917
6
    PartialDiagnosticAt Warning(Loc, S.PDiag(
1918
6
      diag::warn_param_typestate_mismatch) << ExpectedState << ObservedState);
1919
6
1920
6
    Warnings.emplace_back(std::move(Warning), OptionalNotes());
1921
6
  }
1922
1923
  void warnReturnTypestateForUnconsumableType(SourceLocation Loc,
1924
1
                                              StringRef TypeName) override {
1925
1
    PartialDiagnosticAt Warning(Loc, S.PDiag(
1926
1
      diag::warn_return_typestate_for_unconsumable_type) << TypeName);
1927
1
1928
1
    Warnings.emplace_back(std::move(Warning), OptionalNotes());
1929
1
  }
1930
1931
  void warnReturnTypestateMismatch(SourceLocation Loc, StringRef ExpectedState,
1932
1
                                   StringRef ObservedState) override {
1933
1
1934
1
    PartialDiagnosticAt Warning(Loc, S.PDiag(
1935
1
      diag::warn_return_typestate_mismatch) << ExpectedState << ObservedState);
1936
1
1937
1
    Warnings.emplace_back(std::move(Warning), OptionalNotes());
1938
1
  }
1939
1940
  void warnUseOfTempInInvalidState(StringRef MethodName, StringRef State,
1941
3
                                   SourceLocation Loc) override {
1942
3
1943
3
    PartialDiagnosticAt Warning(Loc, S.PDiag(
1944
3
      diag::warn_use_of_temp_in_invalid_state) << MethodName << State);
1945
3
1946
3
    Warnings.emplace_back(std::move(Warning), OptionalNotes());
1947
3
  }
1948
1949
  void warnUseInInvalidState(StringRef MethodName, StringRef VariableName,
1950
94
                             StringRef State, SourceLocation Loc) override {
1951
94
1952
94
    PartialDiagnosticAt Warning(Loc, S.PDiag(diag::warn_use_in_invalid_state) <<
1953
94
                                MethodName << VariableName << State);
1954
94
1955
94
    Warnings.emplace_back(std::move(Warning), OptionalNotes());
1956
94
  }
1957
};
1958
} // anonymous namespace
1959
} // namespace consumed
1960
} // namespace clang
1961
1962
//===----------------------------------------------------------------------===//
1963
// AnalysisBasedWarnings - Worker object used by Sema to execute analysis-based
1964
//  warnings on a function, method, or block.
1965
//===----------------------------------------------------------------------===//
1966
1967
41.5k
clang::sema::AnalysisBasedWarnings::Policy::Policy() {
  // Only the cheap missing-'return' check is on by default; the heavier
  // analyses start disabled and are enabled elsewhere based on which
  // warnings are active.
  enableCheckUnreachable = false;
  enableThreadSafetyAnalysis = false;
  enableConsumedAnalysis = false;
  enableCheckFallThrough = true;
}
1973
1974
249k
/// Returns 1 when \p diag is enabled at any location, 0 otherwise.
static unsigned isEnabled(DiagnosticsEngine &D, unsigned diag) {
  if (D.isIgnored(diag, SourceLocation()))
    return 0;
  return 1;
}
1977
1978
// Zero all statistics counters, then derive the default analysis policy
// from which warnings are currently enabled: the expensive analyses only
// run when at least one of their diagnostics can actually fire.
clang::sema::AnalysisBasedWarnings::AnalysisBasedWarnings(Sema &s)
  : S(s),
    NumFunctionsAnalyzed(0),
    NumFunctionsWithBadCFGs(0),
    NumCFGBlocks(0),
    MaxCFGBlocksPerFunction(0),
    NumUninitAnalysisFunctions(0),
    NumUninitAnalysisVariables(0),
    MaxUninitAnalysisVariablesPerFunction(0),
    NumUninitAnalysisBlockVisits(0),
    MaxUninitAnalysisBlockVisitsPerFunction(0) {

  using namespace diag;
  DiagnosticsEngine &D = S.getDiagnostics();

  // Unreachable-code analysis runs if any of its related warnings is on.
  DefaultPolicy.enableCheckUnreachable =
    isEnabled(D, warn_unreachable) ||
    isEnabled(D, warn_unreachable_break) ||
    isEnabled(D, warn_unreachable_return) ||
    isEnabled(D, warn_unreachable_loop_increment);

  // warn_double_lock stands in for the whole -Wthread-safety group here.
  DefaultPolicy.enableThreadSafetyAnalysis =
    isEnabled(D, warn_double_lock);

  DefaultPolicy.enableConsumedAnalysis =
    isEnabled(D, warn_use_in_invalid_state);
}
2005
2006
500
/// Unconditionally emit every "possibly unreachable" diagnostic deferred on
/// \p fscope; used when the reachability analysis cannot be run.
static void flushDiagnostics(Sema &S, const sema::FunctionScopeInfo *fscope) {
  for (const auto &Delayed : fscope->PossiblyUnreachableDiags)
    S.Diag(Delayed.Loc, Delayed.PD);
}
2010
2011
/// Run all analysis-based warnings (fall-through, unreachable code, thread
/// safety, consumed, uninitialized variables, etc.) on one function, method,
/// or block body, sharing a single AnalysisDeclContext/CFG between them.
void clang::sema::
AnalysisBasedWarnings::IssueWarnings(sema::AnalysisBasedWarnings::Policy P,
                                     sema::FunctionScopeInfo *fscope,
                                     const Decl *D, QualType BlockType) {

  // We avoid doing analysis-based warnings when there are errors for
  // two reasons:
  // (1) The CFGs often can't be constructed (if the body is invalid), so
  //     don't bother trying.
  // (2) The code already has problems; running the analysis just takes more
  //     time.
  DiagnosticsEngine &Diags = S.getDiagnostics();

  // Do not do any analysis if we are going to just ignore them.
  if (Diags.getIgnoreAllWarnings() ||
      (Diags.getSuppressSystemWarnings() &&
       S.SourceMgr.isInSystemHeader(D->getLocation())))
    return;

  // For code in dependent contexts, we'll do this at instantiation time.
  if (cast<DeclContext>(D)->isDependentContext())
    return;

  if (Diags.hasUncompilableErrorOccurred()) {
    // Flush out any possibly unreachable diagnostics.
    flushDiagnostics(S, fscope);
    return;
  }

  const Stmt *Body = D->getBody();
  assert(Body);

  // Construct the analysis context with the specified CFG build options.
  AnalysisDeclContext AC(/* AnalysisDeclContextManager */ nullptr, D);

  // Don't generate EH edges for CallExprs as we'd like to avoid the n^2
  // explosion for destructors that can result and the compile time hit.
  AC.getCFGBuildOptions().PruneTriviallyFalseEdges = true;
  AC.getCFGBuildOptions().AddEHEdges = false;
  AC.getCFGBuildOptions().AddInitializers = true;
  AC.getCFGBuildOptions().AddImplicitDtors = true;
  AC.getCFGBuildOptions().AddTemporaryDtors = true;
  AC.getCFGBuildOptions().AddCXXNewAllocator = false;
  AC.getCFGBuildOptions().AddCXXDefaultInitExprInCtors = true;

  // Force that certain expressions appear as CFGElements in the CFG.  This
  // is used to speed up various analyses.
  // FIXME: This isn't the right factoring.  This is here for initial
  // prototyping, but we need a way for analyses to say what expressions they
  // expect to always be CFGElements and then fill in the BuildOptions
  // appropriately.  This is essentially a layering violation.
  if (P.enableCheckUnreachable || P.enableThreadSafetyAnalysis ||
      P.enableConsumedAnalysis) {
    // Unreachable code analysis and thread safety require a linearized CFG.
    AC.getCFGBuildOptions().setAllAlwaysAdd();
  }
  else {
    AC.getCFGBuildOptions()
      .setAlwaysAdd(Stmt::BinaryOperatorClass)
      .setAlwaysAdd(Stmt::CompoundAssignOperatorClass)
      .setAlwaysAdd(Stmt::BlockExprClass)
      .setAlwaysAdd(Stmt::CStyleCastExprClass)
      .setAlwaysAdd(Stmt::DeclRefExprClass)
      .setAlwaysAdd(Stmt::ImplicitCastExprClass)
      .setAlwaysAdd(Stmt::UnaryOperatorClass)
      .setAlwaysAdd(Stmt::AttributedStmtClass);
  }

  // Install the logical handler for -Wtautological-overlap-compare
  llvm::Optional<LogicalErrorHandler> LEH;
  if (!Diags.isIgnored(diag::warn_tautological_overlap_comparison,
                       D->getBeginLoc())) {
    LEH.emplace(S);
    AC.getCFGBuildOptions().Observer = &*LEH;
  }

  // Emit delayed diagnostics.
  if (!fscope->PossiblyUnreachableDiags.empty()) {
    bool analyzed = false;

    // Register the expressions with the CFGBuilder.
    for (const auto &D : fscope->PossiblyUnreachableDiags) {
      for (const Stmt *S : D.Stmts)
        AC.registerForcedBlockExpression(S);
    }

    if (AC.getCFG()) {
      analyzed = true;
      for (const auto &D : fscope->PossiblyUnreachableDiags) {
        // Emit the delayed diagnostic only if every statement it refers to
        // is reachable from the function entry.
        bool AllReachable = true;
        for (const Stmt *S : D.Stmts) {
          const CFGBlock *block = AC.getBlockForRegisteredExpression(S);
          CFGReverseBlockReachabilityAnalysis *cra =
              AC.getCFGReachablityAnalysis();
          // FIXME: We should be able to assert that block is non-null, but
          // the CFG analysis can skip potentially-evaluated expressions in
          // edge cases; see test/Sema/vla-2.c.
          if (block && cra) {
            // Can this block be reached from the entrance?
            if (!cra->isReachable(&AC.getCFG()->getEntry(), block)) {
              AllReachable = false;
              break;
            }
          }
          // If we cannot map to a basic block, assume the statement is
          // reachable.
        }

        if (AllReachable)
          S.Diag(D.Loc, D.PD);
      }
    }

    // If the CFG could not be built, fall back to emitting everything.
    if (!analyzed)
      flushDiagnostics(S, fscope);
  }

  // Warning: check missing 'return'
  if (P.enableCheckFallThrough) {
    // Pick the diagnostic wording appropriate to the kind of body:
    // block, lambda call operator, coroutine, or ordinary function.
    const CheckFallThroughDiagnostics &CD =
        (isa<BlockDecl>(D)
             ? CheckFallThroughDiagnostics::MakeForBlock()
             : (isa<CXXMethodDecl>(D) &&
                cast<CXXMethodDecl>(D)->getOverloadedOperator() == OO_Call &&
                cast<CXXMethodDecl>(D)->getParent()->isLambda())
                   ? CheckFallThroughDiagnostics::MakeForLambda()
                   : (fscope->isCoroutine()
                          ? CheckFallThroughDiagnostics::MakeForCoroutine(D)
                          : CheckFallThroughDiagnostics::MakeForFunction(D)));
    CheckFallThroughForBody(S, D, Body, BlockType, CD, AC, fscope);
  }

  // Warning: check for unreachable code
  if (P.enableCheckUnreachable) {
    // Only check for unreachable code on non-template instantiations.
    // Different template instantiations can effectively change the control-flow
    // and it is very difficult to prove that a snippet of code in a template
    // is unreachable for all instantiations.
    bool isTemplateInstantiation = false;
    if (const FunctionDecl *Function = dyn_cast<FunctionDecl>(D))
      isTemplateInstantiation = Function->isTemplateInstantiation();
    if (!isTemplateInstantiation)
      CheckUnreachable(S, AC);
  }

  // Check for thread safety violations
  if (P.enableThreadSafetyAnalysis) {
    SourceLocation FL = AC.getDecl()->getLocation();
    SourceLocation FEL = AC.getDecl()->getEndLoc();
    threadSafety::ThreadSafetyReporter Reporter(S, FL, FEL);
    if (!Diags.isIgnored(diag::warn_thread_safety_beta, D->getBeginLoc()))
      Reporter.setIssueBetaWarnings(true);
    if (!Diags.isIgnored(diag::warn_thread_safety_verbose, D->getBeginLoc()))
      Reporter.setVerbose(true);

    threadSafety::runThreadSafetyAnalysis(AC, Reporter,
                                          &S.ThreadSafetyDeclCache);
    Reporter.emitDiagnostics();
  }

  // Check for violations of consumed properties.
  if (P.enableConsumedAnalysis) {
    consumed::ConsumedWarningsHandler WarningHandler(S);
    consumed::ConsumedAnalyzer Analyzer(WarningHandler);
    Analyzer.run(AC);
  }

  // Uninitialized-variable analysis: run if any related warning is enabled.
  if (!Diags.isIgnored(diag::warn_uninit_var, D->getBeginLoc()) ||
      !Diags.isIgnored(diag::warn_sometimes_uninit_var, D->getBeginLoc()) ||
      !Diags.isIgnored(diag::warn_maybe_uninit_var, D->getBeginLoc())) {
    if (CFG *cfg = AC.getCFG()) {
      UninitValsDiagReporter reporter(S);
      UninitVariablesAnalysisStats stats;
      std::memset(&stats, 0, sizeof(UninitVariablesAnalysisStats));
      runUninitializedVariablesAnalysis(*cast<DeclContext>(D), *cfg, AC,
                                        reporter, stats);

      if (S.CollectStats && stats.NumVariablesAnalyzed > 0) {
        ++NumUninitAnalysisFunctions;
        NumUninitAnalysisVariables += stats.NumVariablesAnalyzed;
        NumUninitAnalysisBlockVisits += stats.NumBlockVisits;
        MaxUninitAnalysisVariablesPerFunction =
            std::max(MaxUninitAnalysisVariablesPerFunction,
                     stats.NumVariablesAnalyzed);
        MaxUninitAnalysisBlockVisitsPerFunction =
            std::max(MaxUninitAnalysisBlockVisitsPerFunction,
                     stats.NumBlockVisits);
      }
    }
  }

  bool FallThroughDiagFull =
      !Diags.isIgnored(diag::warn_unannotated_fallthrough, D->getBeginLoc());
  bool FallThroughDiagPerFunction = !Diags.isIgnored(
      diag::warn_unannotated_fallthrough_per_function, D->getBeginLoc());
  if (FallThroughDiagFull || FallThroughDiagPerFunction ||
      fscope->HasFallthroughStmt) {
    DiagnoseSwitchLabelsFallthrough(S, AC, !FallThroughDiagFull);
  }

  if (S.getLangOpts().ObjCWeak &&
      !Diags.isIgnored(diag::warn_arc_repeated_use_of_weak, D->getBeginLoc()))
    diagnoseRepeatedUseOfWeak(S, fscope, D, AC.getParentMap());


  // Check for infinite self-recursion in functions
  if (!Diags.isIgnored(diag::warn_infinite_recursive_function,
                       D->getBeginLoc())) {
    if (const FunctionDecl *FD = dyn_cast<FunctionDecl>(D)) {
      checkRecursiveFunction(S, FD, Body, AC);
    }
  }

  // Check for throw out of non-throwing function.
  if (!Diags.isIgnored(diag::warn_throw_in_noexcept_func, D->getBeginLoc()))
    if (const FunctionDecl *FD = dyn_cast<FunctionDecl>(D))
      if (S.getLangOpts().CPlusPlus && isNoexcept(FD))
        checkThrowInNonThrowingFunc(S, FD, AC);

  // If none of the previous checks caused a CFG build, trigger one here
  // for -Wtautological-overlap-compare
  if (!Diags.isIgnored(diag::warn_tautological_overlap_comparison,
                       D->getBeginLoc())) {
    AC.getCFG();
  }

  // Collect statistics about the CFG if it was built.
  if (S.CollectStats && AC.isCFGBuilt()) {
    ++NumFunctionsAnalyzed;
    if (CFG *cfg = AC.getCFG()) {
      // If we successfully built a CFG for this context, record some more
      // detail information about it.
      NumCFGBlocks += cfg->getNumBlockIDs();
      MaxCFGBlocksPerFunction = std::max(MaxCFGBlocksPerFunction,
                                         cfg->getNumBlockIDs());
    } else {
      ++NumFunctionsWithBadCFGs;
    }
  }
}
2251
2252
3
void clang::sema::AnalysisBasedWarnings::PrintStats() const {
2253
3
  llvm::errs() << "\n*** Analysis Based Warnings Stats:\n";
2254
3
2255
3
  unsigned NumCFGsBuilt = NumFunctionsAnalyzed - NumFunctionsWithBadCFGs;
2256
3
  unsigned AvgCFGBlocksPerFunction =
2257
3
      !NumCFGsBuilt ? 
00
: NumCFGBlocks/NumCFGsBuilt;
2258
3
  llvm::errs() << NumFunctionsAnalyzed << " functions analyzed ("
2259
3
               << NumFunctionsWithBadCFGs << " w/o CFGs).\n"
2260
3
               << "  " << NumCFGBlocks << " CFG blocks built.\n"
2261
3
               << "  " << AvgCFGBlocksPerFunction
2262
3
               << " average CFG blocks per function.\n"
2263
3
               << "  " << MaxCFGBlocksPerFunction
2264
3
               << " max CFG blocks per function.\n";
2265
3
2266
3
  unsigned AvgUninitVariablesPerFunction = !NumUninitAnalysisFunctions ? 0
2267
3
      : 
NumUninitAnalysisVariables/NumUninitAnalysisFunctions0
;
2268
3
  unsigned AvgUninitBlockVisitsPerFunction = !NumUninitAnalysisFunctions ? 0
2269
3
      : 
NumUninitAnalysisBlockVisits/NumUninitAnalysisFunctions0
;
2270
3
  llvm::errs() << NumUninitAnalysisFunctions
2271
3
               << " functions analyzed for uninitialiazed variables\n"
2272
3
               << "  " << NumUninitAnalysisVariables << " variables analyzed.\n"
2273
3
               << "  " << AvgUninitVariablesPerFunction
2274
3
               << " average variables per function.\n"
2275
3
               << "  " << MaxUninitAnalysisVariablesPerFunction
2276
3
               << " max variables per function.\n"
2277
3
               << "  " << NumUninitAnalysisBlockVisits << " block visits.\n"
2278
3
               << "  " << AvgUninitBlockVisitsPerFunction
2279
3
               << " average block visits per function.\n"
2280
3
               << "  " << MaxUninitAnalysisBlockVisitsPerFunction
2281
3
               << " max block visits per function.\n";
2282
3
}