Coverage Report

Created: 2020-02-18 08:44

/Users/buildslave/jenkins/workspace/coverage/llvm-project/clang/lib/Sema/AnalysisBasedWarnings.cpp
Line | Count | Source
1
//=- AnalysisBasedWarnings.cpp - Sema warnings based on libAnalysis -*- C++ -*-=//
2
//
3
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
4
// See https://llvm.org/LICENSE.txt for license information.
5
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
6
//
7
//===----------------------------------------------------------------------===//
8
//
9
// This file defines analysis_warnings::[Policy,Executor].
10
// Together they are used by Sema to issue warnings based on inexpensive
11
// static analysis algorithms in libAnalysis.
12
//
13
//===----------------------------------------------------------------------===//
14
15
#include "clang/Sema/AnalysisBasedWarnings.h"
16
#include "clang/AST/DeclCXX.h"
17
#include "clang/AST/DeclObjC.h"
18
#include "clang/AST/EvaluatedExprVisitor.h"
19
#include "clang/AST/ExprCXX.h"
20
#include "clang/AST/ExprObjC.h"
21
#include "clang/AST/ParentMap.h"
22
#include "clang/AST/RecursiveASTVisitor.h"
23
#include "clang/AST/StmtCXX.h"
24
#include "clang/AST/StmtObjC.h"
25
#include "clang/AST/StmtVisitor.h"
26
#include "clang/Analysis/Analyses/CFGReachabilityAnalysis.h"
27
#include "clang/Analysis/Analyses/Consumed.h"
28
#include "clang/Analysis/Analyses/ReachableCode.h"
29
#include "clang/Analysis/Analyses/ThreadSafety.h"
30
#include "clang/Analysis/Analyses/UninitializedValues.h"
31
#include "clang/Analysis/AnalysisDeclContext.h"
32
#include "clang/Analysis/CFG.h"
33
#include "clang/Analysis/CFGStmtMap.h"
34
#include "clang/Basic/SourceLocation.h"
35
#include "clang/Basic/SourceManager.h"
36
#include "clang/Lex/Preprocessor.h"
37
#include "clang/Sema/ScopeInfo.h"
38
#include "clang/Sema/SemaInternal.h"
39
#include "llvm/ADT/BitVector.h"
40
#include "llvm/ADT/MapVector.h"
41
#include "llvm/ADT/SmallString.h"
42
#include "llvm/ADT/SmallVector.h"
43
#include "llvm/ADT/StringRef.h"
44
#include "llvm/Support/Casting.h"
45
#include <algorithm>
46
#include <deque>
47
#include <iterator>
48
49
using namespace clang;
50
51
//===----------------------------------------------------------------------===//
52
// Unreachable code analysis.
53
//===----------------------------------------------------------------------===//
54
55
namespace {
56
  class UnreachableCodeHandler : public reachable_code::Callback {
57
    Sema &S;
58
    SourceRange PreviousSilenceableCondVal;
59
60
  public:
61
155
    UnreachableCodeHandler(Sema &s) : S(s) {}
62
63
    void HandleUnreachable(reachable_code::UnreachableKind UK,
64
                           SourceLocation L,
65
                           SourceRange SilenceableCondVal,
66
                           SourceRange R1,
67
170
                           SourceRange R2) override {
68
170
      // Avoid reporting multiple unreachable code diagnostics that are
69
170
      // triggered by the same conditional value.
70
170
      if (PreviousSilenceableCondVal.isValid() &&
71
170
          SilenceableCondVal.isValid() &&
72
170
          PreviousSilenceableCondVal == SilenceableCondVal)
73
2
        return;
74
168
      PreviousSilenceableCondVal = SilenceableCondVal;
75
168
76
168
      unsigned diag = diag::warn_unreachable;
77
168
      switch (UK) {
78
16
        case reachable_code::UK_Break:
79
16
          diag = diag::warn_unreachable_break;
80
16
          break;
81
32
        case reachable_code::UK_Return:
82
32
          diag = diag::warn_unreachable_return;
83
32
          break;
84
3
        case reachable_code::UK_Loop_Increment:
85
3
          diag = diag::warn_unreachable_loop_increment;
86
3
          break;
87
117
        case reachable_code::UK_Other:
88
117
          break;
89
168
      }
90
168
91
168
      S.Diag(L, diag) << R1 << R2;
92
168
93
168
      SourceLocation Open = SilenceableCondVal.getBegin();
94
168
      if (Open.isValid()) {
95
43
        SourceLocation Close = SilenceableCondVal.getEnd();
96
43
        Close = S.getLocForEndOfToken(Close);
97
43
        if (Close.isValid()) {
98
43
          S.Diag(Open, diag::note_unreachable_silence)
99
43
            << FixItHint::CreateInsertion(Open, "/* DISABLES CODE */ (")
100
43
            << FixItHint::CreateInsertion(Close, ")");
101
43
        }
102
43
      }
103
168
    }
104
  };
105
} // anonymous namespace
106
107
/// CheckUnreachable - Check for unreachable code.
108
157
static void CheckUnreachable(Sema &S, AnalysisDeclContext &AC) {
109
157
  // As a heuristic prune all diagnostics not in the main file.  Currently
110
157
  // the majority of warnings in headers are false positives.  These
111
157
  // are largely caused by configuration state, e.g. preprocessor
112
157
  // defined code, etc.
113
157
  //
114
157
  // Note that this is also a performance optimization.  Analyzing
115
157
  // headers many times can be expensive.
116
157
  if (!S.getSourceManager().isInMainFile(AC.getDecl()->getBeginLoc()))
117
2
    return;
118
155
119
155
  UnreachableCodeHandler UC(S);
120
155
  reachable_code::FindUnreachableCode(AC, S.getPreprocessor(), UC);
121
155
}
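Editor's illustrative sketch (not part of AnalysisBasedWarnings.cpp): a hypothetical function that exercises the handler above when compiled with -Wunreachable-code.

    // clang++ -c -Wunreachable-code demo.cpp
    int unreachable_demo(int x) {
      return x;
      ++x;   // warn_unreachable: this statement is never executed
    }

When the dead code is instead guarded by a constant that looks like a configuration value, the note offers to wrap the condition as "/* DISABLES CODE */ (...)", which is what the two FixItHint::CreateInsertion calls above produce.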
122
123
namespace {
124
/// Warn on logical operator errors in CFGBuilder
125
class LogicalErrorHandler : public CFGCallback {
126
  Sema &S;
127
128
public:
129
11.7k
  LogicalErrorHandler(Sema &S) : CFGCallback(), S(S) {}
130
131
1.22k
  static bool HasMacroID(const Expr *E) {
132
1.22k
    if (E->getExprLoc().isMacroID())
133
6
      return true;
134
1.21k
135
1.21k
    // Recurse to children.
136
1.21k
    for (const Stmt *SubStmt : E->children())
137
1.07k
      if (const Expr *SubExpr = dyn_cast_or_null<Expr>(SubStmt))
138
1.07k
        if (HasMacroID(SubExpr))
139
16
          return true;
140
1.21k
141
1.21k
    return false;
142
1.21k
  }
143
144
104
  void compareAlwaysTrue(const BinaryOperator *B, bool isAlwaysTrue) override {
145
104
    if (HasMacroID(B))
146
2
      return;
147
102
148
102
    SourceRange DiagRange = B->getSourceRange();
149
102
    S.Diag(B->getExprLoc(), diag::warn_tautological_overlap_comparison)
150
102
        << DiagRange << isAlwaysTrue;
151
102
  }
152
153
  void compareBitwiseEquality(const BinaryOperator *B,
154
28
                              bool isAlwaysTrue) override {
155
28
    if (HasMacroID(B))
156
4
      return;
157
24
158
24
    SourceRange DiagRange = B->getSourceRange();
159
24
    S.Diag(B->getExprLoc(), diag::warn_comparison_bitwise_always)
160
24
        << DiagRange << isAlwaysTrue;
161
24
  }
162
163
16
  void compareBitwiseOr(const BinaryOperator *B) override {
164
16
    if (HasMacroID(B))
165
0
      return;
166
16
167
16
    SourceRange DiagRange = B->getSourceRange();
168
16
    S.Diag(B->getExprLoc(), diag::warn_comparison_bitwise_or) << DiagRange;
169
16
  }
170
171
  static bool hasActiveDiagnostics(DiagnosticsEngine &Diags,
172
321k
                                   SourceLocation Loc) {
173
321k
    return !Diags.isIgnored(diag::warn_tautological_overlap_comparison, Loc) ||
174
321k
           !Diags.isIgnored(diag::warn_comparison_bitwise_or, Loc);
175
321k
  }
176
};
177
} // anonymous namespace
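Editor's illustrative sketch (not part of the original file): comparisons that the three CFG callbacks above report. The function name is hypothetical, and the exact warning flags (-Wtautological-overlap-compare, -Wtautological-bitwise-compare) are assumptions.

    void logical_error_demo(int x) {
      if (x > 5 && x < 3) {}   // compareAlwaysTrue: overlapping ranges, always false
      if ((x & 8) == 4) {}     // compareBitwiseEquality: always false
      if ((x | 4) != 3) {}     // compareBitwiseOr: always true
    }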
178
179
//===----------------------------------------------------------------------===//
180
// Check for infinite self-recursion in functions
181
//===----------------------------------------------------------------------===//
182
183
// Returns true if the function is called anywhere within the CFGBlock.
184
// For member functions, the additional condition of being call from the
185
// this pointer is required.
186
13.6k
static bool hasRecursiveCallInPath(const FunctionDecl *FD, CFGBlock &Block) {
187
13.6k
  // Process all the Stmt's in this block to find any calls to FD.
188
200k
  for (const auto &B : Block) {
189
200k
    if (B.getKind() != CFGElement::Statement)
190
740
      continue;
191
199k
192
199k
    const CallExpr *CE = dyn_cast<CallExpr>(B.getAs<CFGStmt>()->getStmt());
193
199k
    if (!CE || !CE->getCalleeDecl() ||
194
199k
        CE->getCalleeDecl()->getCanonicalDecl() != FD)
195
199k
      continue;
196
17
197
17
    // Skip function calls which are qualified with a templated class.
198
17
    if (const DeclRefExpr *DRE =
199
14
            dyn_cast<DeclRefExpr>(CE->getCallee()->IgnoreParenImpCasts())) {
200
14
      if (NestedNameSpecifier *NNS = DRE->getQualifier()) {
201
1
        if (NNS->getKind() == NestedNameSpecifier::TypeSpec &&
202
1
            isa<TemplateSpecializationType>(NNS->getAsType())) {
203
1
          continue;
204
1
        }
205
16
      }
206
14
    }
207
16
208
16
    const CXXMemberCallExpr *MCE = dyn_cast<CXXMemberCallExpr>(CE);
209
16
    if (!MCE || isa<CXXThisExpr>(MCE->getImplicitObjectArgument()) ||
210
16
        !MCE->getMethodDecl()->isVirtual())
211
16
      return true;
212
16
  }
213
13.6k
  return false;
214
13.6k
}
215
216
// Returns true if every path from the entry block passes through a call to FD.
217
11.7k
static bool checkForRecursiveFunctionCall(const FunctionDecl *FD, CFG *cfg) {
218
11.7k
  llvm::SmallPtrSet<CFGBlock *, 16> Visited;
219
11.7k
  llvm::SmallVector<CFGBlock *, 16> WorkList;
220
11.7k
  // Keep track of whether we found at least one recursive path.
221
11.7k
  bool foundRecursion = false;
222
11.7k
223
11.7k
  const unsigned ExitID = cfg->getExit().getBlockID();
224
11.7k
225
11.7k
  // Seed the work list with the entry block.
226
11.7k
  WorkList.push_back(&cfg->getEntry());
227
11.7k
228
24.8k
  while (!WorkList.empty()) {
229
24.8k
    CFGBlock *Block = WorkList.pop_back_val();
230
24.8k
231
38.9k
    for (auto I = Block->succ_begin(), E = Block->succ_end(); I != E; ++I) {
232
25.8k
      if (CFGBlock *SuccBlock = *I) {
233
25.4k
        if (!Visited.insert(SuccBlock).second)
234
54
          continue;
235
25.3k
236
25.3k
        // Found a path to the exit node without a recursive call.
237
25.3k
        if (ExitID == SuccBlock->getBlockID())
238
11.7k
          return false;
239
13.6k
240
13.6k
        // If the successor block contains a recursive call, end analysis there.
241
13.6k
        if (hasRecursiveCallInPath(FD, *SuccBlock)) {
242
16
          foundRecursion = true;
243
16
          continue;
244
16
        }
245
13.6k
246
13.6k
        WorkList.push_back(SuccBlock);
247
13.6k
      }
248
25.8k
    }
249
24.8k
  }
250
11.7k
  return foundRecursion;
251
11.7k
}
252
253
static void checkRecursiveFunction(Sema &S, const FunctionDecl *FD,
254
11.7k
                                   const Stmt *Body, AnalysisDeclContext &AC) {
255
11.7k
  FD = FD->getCanonicalDecl();
256
11.7k
257
11.7k
  // Only run on non-templated functions and non-templated members of
258
11.7k
  // templated classes.
259
11.7k
  if (FD->getTemplatedKind() != FunctionDecl::TK_NonTemplate &&
260
11.7k
      FD->getTemplatedKind() != FunctionDecl::TK_MemberSpecialization)
261
23
    return;
262
11.7k
263
11.7k
  CFG *cfg = AC.getCFG();
264
11.7k
  if (!cfg) return;
265
11.7k
266
11.7k
  // If the exit block is unreachable, skip processing the function.
267
11.7k
  if (cfg->getExit().pred_empty())
268
5
    return;
269
11.7k
270
11.7k
  // Emit diagnostic if a recursive function call is detected for all paths.
271
11.7k
  if (checkForRecursiveFunctionCall(FD, cfg))
272
13
    S.Diag(Body->getBeginLoc(), diag::warn_infinite_recursive_function);
273
11.7k
}
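Editor's illustrative sketch (not part of the original file): every path through the body below calls the function itself, so checkForRecursiveFunctionCall() finds no recursion-free path to the exit block and -Winfinite-recursion fires. The function name is hypothetical.

    int recursion_demo(int n) {
      return recursion_demo(n + 1);   // warn_infinite_recursive_function
    }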
274
275
//===----------------------------------------------------------------------===//
276
// Check for throw in a non-throwing function.
277
//===----------------------------------------------------------------------===//
278
279
/// Determine whether an exception thrown by E, unwinding from ThrowBlock,
280
/// can reach ExitBlock.
281
static bool throwEscapes(Sema &S, const CXXThrowExpr *E, CFGBlock &ThrowBlock,
282
81
                         CFG *Body) {
283
81
  SmallVector<CFGBlock *, 16> Stack;
284
81
  llvm::BitVector Queued(Body->getNumBlockIDs());
285
81
286
81
  Stack.push_back(&ThrowBlock);
287
81
  Queued[ThrowBlock.getBlockID()] = true;
288
81
289
173
  while (!Stack.empty()) {
290
138
    CFGBlock &UnwindBlock = *Stack.back();
291
138
    Stack.pop_back();
292
138
293
160
    for (auto &Succ : UnwindBlock.succs()) {
294
160
      if (!Succ.isReachable() || Queued[Succ->getBlockID()])
295
0
        continue;
296
160
297
160
      if (Succ->getBlockID() == Body->getExit().getBlockID())
298
46
        return true;
299
114
300
114
      if (auto *Catch =
301
57
              dyn_cast_or_null<CXXCatchStmt>(Succ->getLabel())) {
302
57
        QualType Caught = Catch->getCaughtType();
303
57
        if (Caught.isNull() || // catch (...) catches everything
304
57
            !E->getSubExpr() || // throw; is considered caught by any handler
305
57
            S.handlerCanCatch(Caught, E->getSubExpr()->getType()))
306
35
          // Exception doesn't escape via this path.
307
35
          break;
308
57
      } else {
309
57
        Stack.push_back(Succ);
310
57
        Queued[Succ->getBlockID()] = true;
311
57
      }
312
114
    }
313
138
  }
314
81
315
81
  return false;
316
81
}
317
318
static void visitReachableThrows(
319
    CFG *BodyCFG,
320
5.56k
    llvm::function_ref<void(const CXXThrowExpr *, CFGBlock &)> Visit) {
321
5.56k
  llvm::BitVector Reachable(BodyCFG->getNumBlockIDs());
322
5.56k
  clang::reachable_code::ScanReachableFromBlock(&BodyCFG->getEntry(), Reachable);
323
17.0k
  for (CFGBlock *B : *BodyCFG) {
324
17.0k
    if (!Reachable[B->getBlockID()])
325
36
      continue;
326
29.8k
    for (CFGElement &E : *B) {
327
29.8k
      Optional<CFGStmt> S = E.getAs<CFGStmt>();
328
29.8k
      if (!S)
329
513
        continue;
330
29.3k
      if (auto *Throw = dyn_cast<CXXThrowExpr>(S->getStmt()))
331
81
        Visit(Throw, *B);
332
29.3k
    }
333
16.9k
  }
334
5.56k
}
335
336
static void EmitDiagForCXXThrowInNonThrowingFunc(Sema &S, SourceLocation OpLoc,
337
46
                                                 const FunctionDecl *FD) {
338
46
  if (!S.getSourceManager().isInSystemHeader(OpLoc) &&
339
46
      FD->getTypeSourceInfo()) {
340
46
    S.Diag(OpLoc, diag::warn_throw_in_noexcept_func) << FD;
341
46
    if (S.getLangOpts().CPlusPlus11 &&
342
46
        (isa<CXXDestructorDecl>(FD) ||
343
46
         FD->getDeclName().getCXXOverloadedOperator() == OO_Delete ||
344
46
         FD->getDeclName().getCXXOverloadedOperator() == OO_Array_Delete)) {
345
12
      if (const auto *Ty = FD->getTypeSourceInfo()->getType()->
346
12
                                         getAs<FunctionProtoType>())
347
12
        S.Diag(FD->getLocation(), diag::note_throw_in_dtor)
348
12
            << !isa<CXXDestructorDecl>(FD) << !Ty->hasExceptionSpec()
349
12
            << FD->getExceptionSpecSourceRange();
350
12
    } else
351
34
      S.Diag(FD->getLocation(), diag::note_throw_in_function)
352
34
          << FD->getExceptionSpecSourceRange();
353
46
  }
354
46
}
355
356
static void checkThrowInNonThrowingFunc(Sema &S, const FunctionDecl *FD,
357
5.56k
                                        AnalysisDeclContext &AC) {
358
5.56k
  CFG *BodyCFG = AC.getCFG();
359
5.56k
  if (!BodyCFG)
360
0
    return;
361
5.56k
  if (BodyCFG->getExit().pred_empty())
362
0
    return;
363
5.56k
  visitReachableThrows(BodyCFG, [&](const CXXThrowExpr *Throw, CFGBlock &Block) {
364
81
    if (throwEscapes(S, Throw, Block, BodyCFG))
365
46
      EmitDiagForCXXThrowInNonThrowingFunc(S, Throw->getThrowLoc(), FD);
366
81
  });
367
5.56k
}
368
369
101k
static bool isNoexcept(const FunctionDecl *FD) {
370
101k
  const auto *FPT = FD->getType()->castAs<FunctionProtoType>();
371
101k
  if (FPT->isNothrow() || FD->hasAttr<NoThrowAttr>())
372
5.56k
    return true;
373
96.3k
  return false;
374
96.3k
}
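Editor's illustrative sketch (not part of the original file): the first throw below can unwind out of a noexcept function, so throwEscapes() reaches the exit block and warn_throw_in_noexcept_func is emitted; the second throw is caught locally and is not diagnosed. The function name is hypothetical and the warning group is assumed to be -Wexceptions.

    void throw_demo(bool fail) noexcept {
      if (fail)
        throw 42;   // escapes the noexcept boundary: diagnosed
      try {
        throw 0;    // caught below, does not escape: not diagnosed
      } catch (...) {
      }
    }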
375
376
//===----------------------------------------------------------------------===//
377
// Check for missing return value.
378
//===----------------------------------------------------------------------===//
379
380
enum ControlFlowKind {
381
  UnknownFallThrough,
382
  NeverFallThrough,
383
  MaybeFallThrough,
384
  AlwaysFallThrough,
385
  NeverFallThroughOrReturn
386
};
387
388
/// CheckFallThrough - Check that we don't fall off the end of a
389
/// Statement that should return a value.
390
///
391
/// \returns AlwaysFallThrough iff we always fall off the end of the statement,
392
/// MaybeFallThrough iff we might or might not fall off the end,
393
/// NeverFallThroughOrReturn iff we never fall off the end of the statement or
394
/// return.  We assume NeverFallThrough iff we never fall off the end of the
395
/// statement but we may return.  We assume that functions not marked noreturn
396
/// will return.
397
65.4k
static ControlFlowKind CheckFallThrough(AnalysisDeclContext &AC) {
398
65.4k
  CFG *cfg = AC.getCFG();
399
65.4k
  if (!cfg) return UnknownFallThrough;
400
65.4k
401
65.4k
  // The CFG leaves in dead things, and we don't want the dead code paths to
402
65.4k
  // confuse us, so we mark all live things first.
403
65.4k
  llvm::BitVector live(cfg->getNumBlockIDs());
404
65.4k
  unsigned count = reachable_code::ScanReachableFromBlock(&cfg->getEntry(),
405
65.4k
                                                          live);
406
65.4k
407
65.4k
  bool AddEHEdges = AC.getAddEHEdges();
408
65.4k
  if (!AddEHEdges && count != cfg->getNumBlockIDs())
409
852
    // When there are things remaining dead, and we didn't add EH edges
410
852
    // from CallExprs to the catch clauses, we have to go back and
411
852
    // mark them as live.
412
6.35k
    for (const auto *B : *cfg) {
413
6.35k
      if (!live[B->getBlockID()]) {
414
1.48k
        if (B->pred_begin() == B->pred_end()) {
415
876
          const Stmt *Term = B->getTerminatorStmt();
416
876
          if (Term && isa<CXXTryStmt>(Term))
417
67
            // When not adding EH edges from calls, catch clauses
418
67
            // can otherwise seem dead.  Avoid noting them as dead.
419
67
            count += reachable_code::ScanReachableFromBlock(B, live);
420
876
          continue;
421
876
        }
422
1.48k
      }
423
6.35k
    }
424
65.4k
425
65.4k
  // Now we know what is live, we check the live predecessors of the exit block
426
65.4k
  // and look for fall through paths, being careful to ignore normal returns,
427
65.4k
  // and exceptional paths.
428
65.4k
  bool HasLiveReturn = false;
429
65.4k
  bool HasFakeEdge = false;
430
65.4k
  bool HasPlainEdge = false;
431
65.4k
  bool HasAbnormalEdge = false;
432
65.4k
433
65.4k
  // Ignore default cases that aren't likely to be reachable because all
434
65.4k
  // enums in a switch(X) have explicit case statements.
435
65.4k
  CFGBlock::FilterOptions FO;
436
65.4k
  FO.IgnoreDefaultsWithCoveredEnums = 1;
437
65.4k
438
65.4k
  for (CFGBlock::filtered_pred_iterator I =
439
65.4k
           cfg->getExit().filtered_pred_start_end(FO);
440
134k
       I.hasMore(); ++I) {
441
68.7k
    const CFGBlock &B = **I;
442
68.7k
    if (!live[B.getBlockID()])
443
653
      continue;
444
68.1k
445
68.1k
    // Skip blocks which contain an element marked as no-return. They don't
446
68.1k
    // represent actually viable edges into the exit block, so mark them as
447
68.1k
    // abnormal.
448
68.1k
    if (B.hasNoReturnElement()) {
449
332
      HasAbnormalEdge = true;
450
332
      continue;
451
332
    }
452
67.7k
453
67.7k
    // Destructors can appear after the 'return' in the CFG.  This is
454
67.7k
    // normal.  We need to look past the destructors for the return
455
67.7k
    // statement (if it exists).
456
67.7k
    CFGBlock::const_reverse_iterator ri = B.rbegin(), re = B.rend();
457
67.7k
458
69.3k
    for ( ; ri != re ; ++ri)
459
68.9k
      if (ri->getAs<CFGStmt>())
460
67.3k
        break;
461
67.7k
462
67.7k
    // No more CFGElements in the block?
463
67.7k
    if (ri == re) {
464
388
      const Stmt *Term = B.getTerminatorStmt();
465
388
      if (Term && isa<CXXTryStmt>(Term)) {
466
55
        HasAbnormalEdge = true;
467
55
        continue;
468
55
      }
469
333
      // A labeled empty statement, or the entry block...
470
333
      HasPlainEdge = true;
471
333
      continue;
472
333
    }
473
67.3k
474
67.3k
    CFGStmt CS = ri->castAs<CFGStmt>();
475
67.3k
    const Stmt *S = CS.getStmt();
476
67.3k
    if (isa<ReturnStmt>(S) || isa<CoreturnStmt>(S)) {
477
67.0k
      HasLiveReturn = true;
478
67.0k
      continue;
479
67.0k
    }
480
290
    if (isa<ObjCAtThrowStmt>(S)) {
481
5
      HasFakeEdge = true;
482
5
      continue;
483
5
    }
484
285
    if (isa<CXXThrowExpr>(S)) {
485
48
      HasFakeEdge = true;
486
48
      continue;
487
48
    }
488
237
    if (isa<MSAsmStmt>(S)) {
489
13
      // TODO: Verify this is correct.
490
13
      HasFakeEdge = true;
491
13
      HasLiveReturn = true;
492
13
      continue;
493
13
    }
494
224
    if (isa<CXXTryStmt>(S)) {
495
0
      HasAbnormalEdge = true;
496
0
      continue;
497
0
    }
498
224
    if (std::find(B.succ_begin(), B.succ_end(), &cfg->getExit())
499
224
        == B.succ_end()) {
500
0
      HasAbnormalEdge = true;
501
0
      continue;
502
0
    }
503
224
504
224
    HasPlainEdge = true;
505
224
  }
506
65.4k
  if (!HasPlainEdge) {
507
64.8k
    if (HasLiveReturn)
508
64.6k
      return NeverFallThrough;
509
276
    return NeverFallThroughOrReturn;
510
276
  }
511
547
  if (HasAbnormalEdge || HasFakeEdge || HasLiveReturn)
512
39
    return MaybeFallThrough;
513
508
  // This says AlwaysFallThrough for calls to functions that are not marked
514
508
  // noreturn, that don't return.  If people would like this warning to be more
515
508
  // accurate, such functions should be marked as noreturn.
516
508
  return AlwaysFallThrough;
517
508
}
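Editor's illustrative sketch (not part of the original file): one path returns a value and another falls off the end, so CheckFallThrough() reports MaybeFallThrough and, assuming -Wreturn-type is enabled, warn_maybe_falloff_nonvoid_function is emitted at the closing brace. The function name is hypothetical.

    int maybe_falloff_demo(bool b) {
      if (b)
        return 1;
      // falling off the end here is diagnosed
    }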
518
519
namespace {
520
521
struct CheckFallThroughDiagnostics {
522
  unsigned diag_MaybeFallThrough_HasNoReturn;
523
  unsigned diag_MaybeFallThrough_ReturnsNonVoid;
524
  unsigned diag_AlwaysFallThrough_HasNoReturn;
525
  unsigned diag_AlwaysFallThrough_ReturnsNonVoid;
526
  unsigned diag_NeverFallThroughOrReturn;
527
  enum { Function, Block, Lambda, Coroutine } funMode;
528
  SourceLocation FuncLoc;
529
530
150k
  static CheckFallThroughDiagnostics MakeForFunction(const Decl *Func) {
531
150k
    CheckFallThroughDiagnostics D;
532
150k
    D.FuncLoc = Func->getLocation();
533
150k
    D.diag_MaybeFallThrough_HasNoReturn =
534
150k
      diag::warn_falloff_noreturn_function;
535
150k
    D.diag_MaybeFallThrough_ReturnsNonVoid =
536
150k
      diag::warn_maybe_falloff_nonvoid_function;
537
150k
    D.diag_AlwaysFallThrough_HasNoReturn =
538
150k
      diag::warn_falloff_noreturn_function;
539
150k
    D.diag_AlwaysFallThrough_ReturnsNonVoid =
540
150k
      diag::warn_falloff_nonvoid_function;
541
150k
542
150k
    // Don't suggest that virtual functions be marked "noreturn", since they
543
150k
    // might be overridden by non-noreturn functions.
544
150k
    bool isVirtualMethod = false;
545
150k
    if (const CXXMethodDecl *Method = dyn_cast<CXXMethodDecl>(Func))
546
36.3k
      isVirtualMethod = Method->isVirtual();
547
150k
548
150k
    // Don't suggest that template instantiations be marked "noreturn"
549
150k
    bool isTemplateInstantiation = false;
550
150k
    if (const FunctionDecl *Function = dyn_cast<FunctionDecl>(Func))
551
145k
      isTemplateInstantiation = Function->isTemplateInstantiation();
552
150k
553
150k
    if (!isVirtualMethod && !isTemplateInstantiation)
554
135k
      D.diag_NeverFallThroughOrReturn =
555
135k
        diag::warn_suggest_noreturn_function;
556
14.9k
    else
557
14.9k
      D.diag_NeverFallThroughOrReturn = 0;
558
150k
559
150k
    D.funMode = Function;
560
150k
    return D;
561
150k
  }
562
563
67
  static CheckFallThroughDiagnostics MakeForCoroutine(const Decl *Func) {
564
67
    CheckFallThroughDiagnostics D;
565
67
    D.FuncLoc = Func->getLocation();
566
67
    D.diag_MaybeFallThrough_HasNoReturn = 0;
567
67
    D.diag_MaybeFallThrough_ReturnsNonVoid =
568
67
        diag::warn_maybe_falloff_nonvoid_coroutine;
569
67
    D.diag_AlwaysFallThrough_HasNoReturn = 0;
570
67
    D.diag_AlwaysFallThrough_ReturnsNonVoid =
571
67
        diag::warn_falloff_nonvoid_coroutine;
572
67
    D.funMode = Coroutine;
573
67
    return D;
574
67
  }
575
576
1.94k
  static CheckFallThroughDiagnostics MakeForBlock() {
577
1.94k
    CheckFallThroughDiagnostics D;
578
1.94k
    D.diag_MaybeFallThrough_HasNoReturn =
579
1.94k
      diag::err_noreturn_block_has_return_expr;
580
1.94k
    D.diag_MaybeFallThrough_ReturnsNonVoid =
581
1.94k
      diag::err_maybe_falloff_nonvoid_block;
582
1.94k
    D.diag_AlwaysFallThrough_HasNoReturn =
583
1.94k
      diag::err_noreturn_block_has_return_expr;
584
1.94k
    D.diag_AlwaysFallThrough_ReturnsNonVoid =
585
1.94k
      diag::err_falloff_nonvoid_block;
586
1.94k
    D.diag_NeverFallThroughOrReturn = 0;
587
1.94k
    D.funMode = Block;
588
1.94k
    return D;
589
1.94k
  }
590
591
2.63k
  static CheckFallThroughDiagnostics MakeForLambda() {
592
2.63k
    CheckFallThroughDiagnostics D;
593
2.63k
    D.diag_MaybeFallThrough_HasNoReturn =
594
2.63k
      diag::err_noreturn_lambda_has_return_expr;
595
2.63k
    D.diag_MaybeFallThrough_ReturnsNonVoid =
596
2.63k
      diag::warn_maybe_falloff_nonvoid_lambda;
597
2.63k
    D.diag_AlwaysFallThrough_HasNoReturn =
598
2.63k
      diag::err_noreturn_lambda_has_return_expr;
599
2.63k
    D.diag_AlwaysFallThrough_ReturnsNonVoid =
600
2.63k
      diag::warn_falloff_nonvoid_lambda;
601
2.63k
    D.diag_NeverFallThroughOrReturn = 0;
602
2.63k
    D.funMode = Lambda;
603
2.63k
    return D;
604
2.63k
  }
605
606
  bool checkDiagnostics(DiagnosticsEngine &D, bool ReturnsVoid,
607
155k
                        bool HasNoReturn) const {
608
155k
    if (funMode == Function) {
609
150k
      return (ReturnsVoid ||
610
150k
              D.isIgnored(diag::warn_maybe_falloff_nonvoid_function,
611
64.5k
                          FuncLoc)) &&
612
150k
             (!HasNoReturn ||
613
86.1k
              D.isIgnored(diag::warn_noreturn_function_has_return_expr,
614
81
                          FuncLoc)) &&
615
150k
             (!ReturnsVoid ||
616
86.1k
              D.isIgnored(diag::warn_suggest_noreturn_block, FuncLoc));
617
150k
    }
618
4.64k
    if (funMode == Coroutine) {
619
67
      return (ReturnsVoid ||
620
67
              D.isIgnored(diag::warn_maybe_falloff_nonvoid_function, FuncLoc) ||
621
67
              D.isIgnored(diag::warn_maybe_falloff_nonvoid_coroutine,
622
16
                          FuncLoc)) &&
623
67
             (!HasNoReturn);
624
67
    }
625
4.57k
    // For blocks / lambdas.
626
4.57k
    return ReturnsVoid && !HasNoReturn;
627
4.57k
  }
628
};
629
630
} // anonymous namespace
631
632
/// CheckFallThroughForBody - Check that we don't fall off the end of a
633
/// function that should return a value.  Check that we don't fall off the end
634
/// of a noreturn function.  We assume that functions and blocks not marked
635
/// noreturn will return.
636
static void CheckFallThroughForBody(Sema &S, const Decl *D, const Stmt *Body,
637
                                    QualType BlockType,
638
                                    const CheckFallThroughDiagnostics &CD,
639
                                    AnalysisDeclContext &AC,
640
155k
                                    sema::FunctionScopeInfo *FSI) {
641
155k
642
155k
  bool ReturnsVoid = false;
643
155k
  bool HasNoReturn = false;
644
155k
  bool IsCoroutine = FSI->isCoroutine();
645
155k
646
155k
  if (const auto *FD = dyn_cast<FunctionDecl>(D)) {
647
148k
    if (const auto *CBody = dyn_cast<CoroutineBodyStmt>(Body))
648
68
      ReturnsVoid = CBody->getFallthroughHandler() != nullptr;
649
148k
    else
650
148k
      ReturnsVoid = FD->getReturnType()->isVoidType();
651
148k
    HasNoReturn = FD->isNoReturn();
652
148k
  }
653
7.00k
  else if (const auto *MD = dyn_cast<ObjCMethodDecl>(D)) {
654
5.06k
    ReturnsVoid = MD->getReturnType()->isVoidType();
655
5.06k
    HasNoReturn = MD->hasAttr<NoReturnAttr>();
656
5.06k
  }
657
1.94k
  else if (isa<BlockDecl>(D)) {
658
1.94k
    if (const FunctionType *FT =
659
1.94k
          BlockType->getPointeeType()->getAs<FunctionType>()) {
660
1.94k
      if (FT->getReturnType()->isVoidType())
661
1.55k
        ReturnsVoid = true;
662
1.94k
      if (FT->getNoReturnAttr())
663
1
        HasNoReturn = true;
664
1.94k
    }
665
1.94k
  }
666
155k
667
155k
  DiagnosticsEngine &Diags = S.getDiagnostics();
668
155k
669
155k
  // Short circuit for compilation speed.
670
155k
  if (CD.checkDiagnostics(Diags, ReturnsVoid, HasNoReturn))
671
89.8k
      return;
672
65.4k
  SourceLocation LBrace = Body->getBeginLoc(), RBrace = Body->getEndLoc();
673
65.4k
  auto EmitDiag = [&](SourceLocation Loc, unsigned DiagID) {
674
500
    if (IsCoroutine)
675
6
      S.Diag(Loc, DiagID) << FSI->CoroutinePromise->getType();
676
494
    else
677
494
      S.Diag(Loc, DiagID);
678
500
  };
679
65.4k
680
65.4k
  // cpu_dispatch functions permit empty function bodies for ICC compatibility.
681
65.4k
  if (D->getAsFunction() && D->getAsFunction()->isCPUDispatchMultiVersion())
682
0
    return;
683
65.4k
684
65.4k
  // Either in a function body compound statement, or a function-try-block.
685
65.4k
  switch (CheckFallThrough(AC)) {
686
39
    case UnknownFallThrough:
687
39
      break;
688
0
689
39
    case MaybeFallThrough:
690
39
      if (HasNoReturn)
691
0
        EmitDiag(RBrace, CD.diag_MaybeFallThrough_HasNoReturn);
692
39
      else if (!ReturnsVoid)
693
38
        EmitDiag(RBrace, CD.diag_MaybeFallThrough_ReturnsNonVoid);
694
39
      break;
695
508
    case AlwaysFallThrough:
696
508
      if (HasNoReturn)
697
21
        EmitDiag(RBrace, CD.diag_AlwaysFallThrough_HasNoReturn);
698
487
      else if (!ReturnsVoid)
699
441
        EmitDiag(RBrace, CD.diag_AlwaysFallThrough_ReturnsNonVoid);
700
508
      break;
701
276
    case NeverFallThroughOrReturn:
702
276
      if (ReturnsVoid && !HasNoReturn && CD.diag_NeverFallThroughOrReturn) {
703
4
        if (const FunctionDecl *FD = dyn_cast<FunctionDecl>(D)) {
704
3
          S.Diag(LBrace, CD.diag_NeverFallThroughOrReturn) << 0 << FD;
705
3
        } else if (const ObjCMethodDecl *MD = dyn_cast<ObjCMethodDecl>(D)) {
706
1
          S.Diag(LBrace, CD.diag_NeverFallThroughOrReturn) << 1 << MD;
707
1
        } else {
708
0
          S.Diag(LBrace, CD.diag_NeverFallThroughOrReturn);
709
0
        }
710
4
      }
711
276
      break;
712
64.6k
    case NeverFallThrough:
713
64.6k
      break;
714
65.4k
  }
715
65.4k
}
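Editor's illustrative sketch (not part of the original file): a lambda with a non-void return type whose body always falls off the end; the diagnostics table built by MakeForLambda() maps this AlwaysFallThrough result to warn_falloff_nonvoid_lambda. Names are hypothetical.

    void lambda_falloff_demo() {
      auto f = []() -> int {
      };   // warn_falloff_nonvoid_lambda at the closing brace
      (void)f;
    }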
716
717
//===----------------------------------------------------------------------===//
718
// -Wuninitialized
719
//===----------------------------------------------------------------------===//
720
721
namespace {
722
/// ContainsReference - A visitor class to search for references to
723
/// a particular declaration (the needle) within any evaluated component of an
724
/// expression (recursively).
725
class ContainsReference : public ConstEvaluatedExprVisitor<ContainsReference> {
726
  bool FoundReference;
727
  const DeclRefExpr *Needle;
728
729
public:
730
  typedef ConstEvaluatedExprVisitor<ContainsReference> Inherited;
731
732
  ContainsReference(ASTContext &Context, const DeclRefExpr *Needle)
733
66
    : Inherited(Context), FoundReference(false), Needle(Needle) {}
734
735
293
  void VisitExpr(const Expr *E) {
736
293
    // Stop evaluating if we already have a reference.
737
293
    if (FoundReference)
738
24
      return;
739
269
740
269
    Inherited::VisitExpr(E);
741
269
  }
742
743
124
  void VisitDeclRefExpr(const DeclRefExpr *E) {
744
124
    if (E == Needle)
745
64
      FoundReference = true;
746
60
    else
747
60
      Inherited::VisitDeclRefExpr(E);
748
124
  }
749
750
66
  bool doesContainReference() const { return FoundReference; }
751
};
752
} // anonymous namespace
753
754
727
static bool SuggestInitializationFixit(Sema &S, const VarDecl *VD) {
755
727
  QualType VariableTy = VD->getType().getCanonicalType();
756
727
  if (VariableTy->isBlockPointerType() &&
757
727
      !VD->hasAttr<BlocksAttr>()) {
758
3
    S.Diag(VD->getLocation(), diag::note_block_var_fixit_add_initialization)
759
3
        << VD->getDeclName()
760
3
        << FixItHint::CreateInsertion(VD->getLocation(), "__block ");
761
3
    return true;
762
3
  }
763
724
764
724
  // Don't issue a fixit if there is already an initializer.
765
724
  if (VD->getInit())
766
2
    return false;
767
722
768
722
  // Don't suggest a fixit inside macros.
769
722
  if (VD->getEndLoc().isMacroID())
770
2
    return false;
771
720
772
720
  SourceLocation Loc = S.getLocForEndOfToken(VD->getEndLoc());
773
720
774
720
  // Suggest possible initialization (if any).
775
720
  std::string Init = S.getFixItZeroInitializerForType(VariableTy, Loc);
776
720
  if (Init.empty())
777
5
    return false;
778
715
779
715
  S.Diag(Loc, diag::note_var_fixit_add_initialization) << VD->getDeclName()
780
715
    << FixItHint::CreateInsertion(Loc, Init);
781
715
  return true;
782
715
}
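Editor's illustrative sketch (not part of the original file): a variable that is uninitialized on one path. The warning itself comes from the -Wsometimes-uninitialized machinery below; the note produced by SuggestInitializationFixit() offers to rewrite the declaration as "int n = 0;". The function name is hypothetical.

    int fixit_demo(bool b) {
      int n;        // note: fix-it suggests zero-initialization "int n = 0;"
      if (b)
        n = 1;
      return n;     // 'n' may be uninitialized when 'b' is false
    }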
783
784
/// Create a fixit to remove an if-like statement, on the assumption that its
785
/// condition is CondVal.
786
static void CreateIfFixit(Sema &S, const Stmt *If, const Stmt *Then,
787
                          const Stmt *Else, bool CondVal,
788
23
                          FixItHint &Fixit1, FixItHint &Fixit2) {
789
23
  if (CondVal) {
790
12
    // If condition is always true, remove all but the 'then'.
791
12
    Fixit1 = FixItHint::CreateRemoval(
792
12
        CharSourceRange::getCharRange(If->getBeginLoc(), Then->getBeginLoc()));
793
12
    if (Else) {
794
2
      SourceLocation ElseKwLoc = S.getLocForEndOfToken(Then->getEndLoc());
795
2
      Fixit2 =
796
2
          FixItHint::CreateRemoval(SourceRange(ElseKwLoc, Else->getEndLoc()));
797
2
    }
798
12
  } else {
799
11
    // If condition is always false, remove all but the 'else'.
800
11
    if (Else)
801
11
      Fixit1 = FixItHint::CreateRemoval(CharSourceRange::getCharRange(
802
11
          If->getBeginLoc(), Else->getBeginLoc()));
803
0
    else
804
0
      Fixit1 = FixItHint::CreateRemoval(If->getSourceRange());
805
11
  }
806
23
}
807
808
/// DiagUninitUse -- Helper function to produce a diagnostic for an
809
/// uninitialized use of a variable.
810
static void DiagUninitUse(Sema &S, const VarDecl *VD, const UninitUse &Use,
811
724
                          bool IsCapturedByBlock) {
812
724
  bool Diagnosed = false;
813
724
814
724
  switch (Use.getKind()) {
815
634
  case UninitUse::Always:
816
634
    S.Diag(Use.getUser()->getBeginLoc(), diag::warn_uninit_var)
817
634
        << VD->getDeclName() << IsCapturedByBlock
818
634
        << Use.getUser()->getSourceRange();
819
634
    return;
820
0
821
8
  case UninitUse::AfterDecl:
822
8
  case UninitUse::AfterCall:
823
8
    S.Diag(VD->getLocation(), diag::warn_sometimes_uninit_var)
824
8
      << VD->getDeclName() << IsCapturedByBlock
825
8
      << (Use.getKind() == UninitUse::AfterDecl ? 4 : 5)
826
8
      << const_cast<DeclContext*>(VD->getLexicalDeclContext())
827
8
      << VD->getSourceRange();
828
8
    S.Diag(Use.getUser()->getBeginLoc(), diag::note_uninit_var_use)
829
8
        << IsCapturedByBlock << Use.getUser()->getSourceRange();
830
8
    return;
831
8
832
82
  case UninitUse::Maybe:
833
82
  case UninitUse::Sometimes:
834
82
    // Carry on to report sometimes-uninitialized branches, if possible,
835
82
    // or a 'may be used uninitialized' diagnostic otherwise.
836
82
    break;
837
82
  }
838
82
839
82
  // Diagnose each branch which leads to a sometimes-uninitialized use.
840
82
  for (UninitUse::branch_iterator I = Use.branch_begin(), E = Use.branch_end();
841
137
       I != E; ++I) {
842
55
    assert(Use.getKind() == UninitUse::Sometimes);
843
55
844
55
    const Expr *User = Use.getUser();
845
55
    const Stmt *Term = I->Terminator;
846
55
847
55
    // Information used when building the diagnostic.
848
55
    unsigned DiagKind;
849
55
    StringRef Str;
850
55
    SourceRange Range;
851
55
852
55
    // FixIts to suppress the diagnostic by removing the dead condition.
853
55
    // For all binary terminators, branch 0 is taken if the condition is true,
854
55
    // and branch 1 is taken if the condition is false.
855
55
    int RemoveDiagKind = -1;
856
55
    const char *FixitStr =
857
55
        S.getLangOpts().CPlusPlus ? 
(I->Output 47
?
"true"20
:
"false"27
)
858
55
                                  : 
(I->Output 8
?
"1"4
:
"0"4
);
859
55
    FixItHint Fixit1, Fixit2;
860
55
861
55
    switch (Term ? Term->getStmtClass() : Stmt::DeclStmtClass) {
862
0
    default:
863
0
      // Don't know how to report this. Just fall back to 'may be used
864
0
      // uninitialized'. FIXME: Can this happen?
865
0
      continue;
866
0
867
0
    // "condition is true / condition is false".
868
19
    case Stmt::IfStmtClass: {
869
19
      const IfStmt *IS = cast<IfStmt>(Term);
870
19
      DiagKind = 0;
871
19
      Str = "if";
872
19
      Range = IS->getCond()->getSourceRange();
873
19
      RemoveDiagKind = 0;
874
19
      CreateIfFixit(S, IS, IS->getThen(), IS->getElse(),
875
19
                    I->Output, Fixit1, Fixit2);
876
19
      break;
877
0
    }
878
4
    case Stmt::ConditionalOperatorClass: {
879
4
      const ConditionalOperator *CO = cast<ConditionalOperator>(Term);
880
4
      DiagKind = 0;
881
4
      Str = "?:";
882
4
      Range = CO->getCond()->getSourceRange();
883
4
      RemoveDiagKind = 0;
884
4
      CreateIfFixit(S, CO, CO->getTrueExpr(), CO->getFalseExpr(),
885
4
                    I->Output, Fixit1, Fixit2);
886
4
      break;
887
0
    }
888
12
    case Stmt::BinaryOperatorClass: {
889
12
      const BinaryOperator *BO = cast<BinaryOperator>(Term);
890
12
      if (!BO->isLogicalOp())
891
0
        continue;
892
12
      DiagKind = 0;
893
12
      Str = BO->getOpcodeStr();
894
12
      Range = BO->getLHS()->getSourceRange();
895
12
      RemoveDiagKind = 0;
896
12
      if ((BO->getOpcode() == BO_LAnd && I->Output) ||
897
12
          (BO->getOpcode() == BO_LOr && !I->Output))
898
8
        // true && y -> y, false || y -> y.
899
8
        Fixit1 = FixItHint::CreateRemoval(
900
8
            SourceRange(BO->getBeginLoc(), BO->getOperatorLoc()));
901
4
      else
902
4
        // false && y -> false, true || y -> true.
903
4
        Fixit1 = FixItHint::CreateReplacement(BO->getSourceRange(), FixitStr);
904
12
      break;
905
12
    }
906
12
907
12
    // "loop is entered / loop is exited".
908
12
    case Stmt::WhileStmtClass:
909
4
      DiagKind = 1;
910
4
      Str = "while";
911
4
      Range = cast<WhileStmt>(Term)->getCond()->getSourceRange();
912
4
      RemoveDiagKind = 1;
913
4
      Fixit1 = FixItHint::CreateReplacement(Range, FixitStr);
914
4
      break;
915
12
    case Stmt::ForStmtClass:
916
4
      DiagKind = 1;
917
4
      Str = "for";
918
4
      Range = cast<ForStmt>(Term)->getCond()->getSourceRange();
919
4
      RemoveDiagKind = 1;
920
4
      if (I->Output)
921
2
        Fixit1 = FixItHint::CreateRemoval(Range);
922
2
      else
923
2
        Fixit1 = FixItHint::CreateReplacement(Range, FixitStr);
924
4
      break;
925
12
    case Stmt::CXXForRangeStmtClass:
926
4
      if (I->Output == 1) {
927
2
        // The use occurs if a range-based for loop's body never executes.
928
2
        // That may be impossible, and there's no syntactic fix for this,
929
2
        // so treat it as a 'may be uninitialized' case.
930
2
        continue;
931
2
      }
932
2
      DiagKind = 1;
933
2
      Str = "for";
934
2
      Range = cast<CXXForRangeStmt>(Term)->getRangeInit()->getSourceRange();
935
2
      break;
936
2
937
2
    // "condition is true / loop is exited".
938
4
    case Stmt::DoStmtClass:
939
4
      DiagKind = 2;
940
4
      Str = "do";
941
4
      Range = cast<DoStmt>(Term)->getCond()->getSourceRange();
942
4
      RemoveDiagKind = 1;
943
4
      Fixit1 = FixItHint::CreateReplacement(Range, FixitStr);
944
4
      break;
945
2
946
2
    // "switch case is taken".
947
2
    case Stmt::CaseStmtClass:
948
2
      DiagKind = 3;
949
2
      Str = "case";
950
2
      Range = cast<CaseStmt>(Term)->getLHS()->getSourceRange();
951
2
      break;
952
2
    case Stmt::DefaultStmtClass:
953
2
      DiagKind = 3;
954
2
      Str = "default";
955
2
      Range = cast<DefaultStmt>(Term)->getDefaultLoc();
956
2
      break;
957
53
    }
958
53
959
53
    S.Diag(Range.getBegin(), diag::warn_sometimes_uninit_var)
960
53
      << VD->getDeclName() << IsCapturedByBlock << DiagKind
961
53
      << Str << I->Output << Range;
962
53
    S.Diag(User->getBeginLoc(), diag::note_uninit_var_use)
963
53
        << IsCapturedByBlock << User->getSourceRange();
964
53
    if (RemoveDiagKind != -1)
965
47
      S.Diag(Fixit1.RemoveRange.getBegin(), diag::note_uninit_fixit_remove_cond)
966
47
        << RemoveDiagKind << Str << I->Output << Fixit1 << Fixit2;
967
53
968
53
    Diagnosed = true;
969
53
  }
970
82
971
82
  if (!Diagnosed)
972
31
    S.Diag(Use.getUser()->getBeginLoc(), diag::warn_maybe_uninit_var)
973
31
        << VD->getDeclName() << IsCapturedByBlock
974
31
        << Use.getUser()->getSourceRange();
975
82
}
976
977
/// DiagnoseUninitializedUse -- Helper function for diagnosing uses of an
978
/// uninitialized variable. This manages the different forms of diagnostic
979
/// emitted for particular types of uses. Returns true if the use was diagnosed
980
/// as a warning. If a particular use is one we omit warnings for, returns
981
/// false.
982
static bool DiagnoseUninitializedUse(Sema &S, const VarDecl *VD,
983
                                     const UninitUse &Use,
984
791
                                     bool alwaysReportSelfInit = false) {
985
791
  if (const DeclRefExpr *DRE = dyn_cast<DeclRefExpr>(Use.getUser())) {
986
786
    // Inspect the initializer of the variable declaration which is
987
786
    // being referenced prior to its initialization. We emit
988
786
    // specialized diagnostics for self-initialization, and we
989
786
    // specifically avoid warning about self references which take the
990
786
    // form of:
991
786
    //
992
786
    //   int x = x;
993
786
    //
994
786
    // This is used to indicate to GCC that 'x' is intentionally left
995
786
    // uninitialized. Proven code paths which access 'x' in
996
786
    // an uninitialized state after this will still warn.
997
786
    if (const Expr *Initializer = VD->getInit()) {
998
66
      if (!alwaysReportSelfInit && DRE == Initializer->IgnoreParenImpCasts())
999
0
        return false;
1000
66
1001
66
      ContainsReference CR(S.Context, DRE);
1002
66
      CR.Visit(Initializer);
1003
66
      if (CR.doesContainReference()) {
1004
64
        S.Diag(DRE->getBeginLoc(), diag::warn_uninit_self_reference_in_init)
1005
64
            << VD->getDeclName() << VD->getLocation() << DRE->getSourceRange();
1006
64
        return true;
1007
64
      }
1008
722
    }
1009
722
1010
722
    DiagUninitUse(S, VD, Use, false);
1011
722
  } else {
1012
5
    const BlockExpr *BE = cast<BlockExpr>(Use.getUser());
1013
5
    if (VD->getType()->isBlockPointerType() && !VD->hasAttr<BlocksAttr>())
1014
3
      S.Diag(BE->getBeginLoc(),
1015
3
             diag::warn_uninit_byref_blockvar_captured_by_block)
1016
3
          << VD->getDeclName()
1017
3
          << VD->getType().getQualifiers().hasObjCLifetime();
1018
2
    else
1019
2
      DiagUninitUse(S, VD, Use, true);
1020
5
  }
1021
791
1022
791
  // Report where the variable was declared when the use wasn't within
1023
791
  // the initializer of that declaration & we didn't already suggest
1024
791
  // an initialization fixit.
1025
791
  
if (727
!SuggestInitializationFixit(S, VD)727
)
1026
9
    S.Diag(VD->getBeginLoc(), diag::note_var_declared_here)
1027
9
        << VD->getDeclName();
1028
727
1029
727
  return true;
1030
791
}
1031
1032
namespace {
1033
  class FallthroughMapper : public RecursiveASTVisitor<FallthroughMapper> {
1034
  public:
1035
    FallthroughMapper(Sema &S)
1036
      : FoundSwitchStatements(false),
1037
119
        S(S) {
1038
119
    }
1039
1040
119
    bool foundSwitchStatements() const { return FoundSwitchStatements; }
1041
1042
43
    void markFallthroughVisited(const AttributedStmt *Stmt) {
1043
43
      bool Found = FallthroughStmts.erase(Stmt);
1044
43
      assert(Found);
1045
43
      (void)Found;
1046
43
    }
1047
1048
    typedef llvm::SmallPtrSet<const AttributedStmt*, 8> AttrStmts;
1049
1050
91
    const AttrStmts &getFallthroughStmts() const {
1051
91
      return FallthroughStmts;
1052
91
    }
1053
1054
76
    void fillReachableBlocks(CFG *Cfg) {
1055
76
      assert(ReachableBlocks.empty() && "ReachableBlocks already filled");
1056
76
      std::deque<const CFGBlock *> BlockQueue;
1057
76
1058
76
      ReachableBlocks.insert(&Cfg->getEntry());
1059
76
      BlockQueue.push_back(&Cfg->getEntry());
1060
76
      // Mark all case blocks reachable to avoid problems with switching on
1061
76
      // constants, covered enums, etc.
1062
76
      // These blocks can contain fall-through annotations, and we don't want to
1063
76
      // issue a warn_fallthrough_attr_unreachable for them.
1064
650
      for (const auto *B : *Cfg) {
1065
650
        const Stmt *L = B->getLabel();
1066
650
        if (L && isa<SwitchCase>(L) && ReachableBlocks.insert(B).second)
1067
263
          BlockQueue.push_back(B);
1068
650
      }
1069
76
1070
696
      while (!BlockQueue.empty()) {
1071
620
        const CFGBlock *P = BlockQueue.front();
1072
620
        BlockQueue.pop_front();
1073
620
        for (CFGBlock::const_succ_iterator I = P->succ_begin(),
1074
620
                                           E = P->succ_end();
1075
1.44k
             I != E; ++I) {
1076
825
          if (*I && ReachableBlocks.insert(*I).second)
1077
281
            BlockQueue.push_back(*I);
1078
825
        }
1079
620
      }
1080
76
    }
1081
1082
    bool checkFallThroughIntoBlock(const CFGBlock &B, int &AnnotatedCnt,
1083
263
                                   bool IsTemplateInstantiation) {
1084
263
      assert(!ReachableBlocks.empty() && "ReachableBlocks empty");
1085
263
1086
263
      int UnannotatedCnt = 0;
1087
263
      AnnotatedCnt = 0;
1088
263
1089
263
      std::deque<const CFGBlock*> BlockQueue(B.pred_begin(), B.pred_end());
1090
688
      while (!BlockQueue.empty()) {
1091
425
        const CFGBlock *P = BlockQueue.front();
1092
425
        BlockQueue.pop_front();
1093
425
        if (!P) continue;
1094
419
1095
419
        const Stmt *Term = P->getTerminatorStmt();
1096
419
        if (Term && isa<SwitchStmt>(Term))
1097
262
          continue; // Switch statement, good.
1098
157
1099
157
        const SwitchCase *SW = dyn_cast_or_null<SwitchCase>(P->getLabel());
1100
157
        if (SW && SW->getSubStmt() == B.getLabel() && P->begin() == P->end())
1101
4
          continue; // Previous case label has no statements, good.
1102
153
1103
153
        const LabelStmt *L = dyn_cast_or_null<LabelStmt>(P->getLabel());
1104
153
        if (L && L->getSubStmt() == B.getLabel() && P->begin() == P->end())
1105
3
          continue; // Case label is preceded with a normal label, good.
1106
150
1107
150
        if (!ReachableBlocks.count(P)) {
1108
17
          for (CFGBlock::const_reverse_iterator ElemIt = P->rbegin(),
1109
17
                                                ElemEnd = P->rend();
1110
43
               ElemIt != ElemEnd; ++ElemIt) {
1111
31
            if (Optional<CFGStmt> CS = ElemIt->getAs<CFGStmt>()) {
1112
29
              if (const AttributedStmt *AS = asFallThroughAttr(CS->getStmt())) {
1113
5
                // Don't issue a warning for an unreachable fallthrough
1114
5
                // attribute in template instantiations as it may not be
1115
5
                // unreachable in all instantiations of the template.
1116
5
                if (!IsTemplateInstantiation)
1117
4
                  S.Diag(AS->getBeginLoc(),
1118
4
                         diag::warn_fallthrough_attr_unreachable);
1119
5
                markFallthroughVisited(AS);
1120
5
                ++AnnotatedCnt;
1121
5
                break;
1122
5
              }
1123
29
              // Don't care about other unreachable statements.
1124
29
            }
1125
31
          }
1126
17
          // If there are no unreachable statements, this may be a special
1127
17
          // case in CFG:
1128
17
          // case X: {
1129
17
          //    A a;  // A has a destructor.
1130
17
          //    break;
1131
17
          // }
1132
17
          // // <<<< This place is represented by a 'hanging' CFG block.
1133
17
          // case Y:
1134
17
          continue;
1135
17
        }
1136
133
1137
133
        const Stmt *LastStmt = getLastStmt(*P);
1138
133
        if (const AttributedStmt *AS = asFallThroughAttr(LastStmt)) {
1139
38
          markFallthroughVisited(AS);
1140
38
          ++AnnotatedCnt;
1141
38
          continue; // Fallthrough annotation, good.
1142
38
        }
1143
95
1144
95
        if (!LastStmt) { // This block contains no executable statements.
1145
2
          // Traverse its predecessors.
1146
2
          std::copy(P->pred_begin(), P->pred_end(),
1147
2
                    std::back_inserter(BlockQueue));
1148
2
          continue;
1149
2
        }
1150
93
1151
93
        if (isFollowedByFallThroughComment(LastStmt)) {
1152
2
          ++AnnotatedCnt;
1153
2
          continue; // Fallthrough comment, good.
1154
2
        }
1155
91
1156
91
        ++UnannotatedCnt;
1157
91
      }
1158
263
      return !!UnannotatedCnt;
1159
263
    }
1160
1161
    // RecursiveASTVisitor setup.
1162
13
    bool shouldWalkTypesOfTypeLocs() const { return false; }
1163
1164
59
    bool VisitAttributedStmt(AttributedStmt *S) {
1165
59
      if (asFallThroughAttr(S))
1166
59
        FallthroughStmts.insert(S);
1167
59
      return true;
1168
59
    }
1169
1170
100
    bool VisitSwitchStmt(SwitchStmt *S) {
1171
100
      FoundSwitchStatements = true;
1172
100
      return true;
1173
100
    }
1174
1175
    // We don't want to traverse local type declarations. We analyze their
1176
    // methods separately.
1177
24
    bool TraverseDecl(Decl *D) { return true; }
1178
1179
    // We analyze lambda bodies separately. Skip them here.
1180
1
    bool TraverseLambdaExpr(LambdaExpr *LE) {
1181
1
      // Traverse the captures, but not the body.
1182
1
      for (const auto C : zip(LE->captures(), LE->capture_inits()))
1183
0
        TraverseLambdaCapture(LE, &std::get<0>(C), std::get<1>(C));
1184
1
      return true;
1185
1
    }
1186
1187
  private:
1188
1189
221
    static const AttributedStmt *asFallThroughAttr(const Stmt *S) {
1190
221
      if (const AttributedStmt *AS = dyn_cast_or_null<AttributedStmt>(S)) {
1191
102
        if (hasSpecificAttr<FallThroughAttr>(AS->getAttrs()))
1192
102
          return AS;
1193
119
      }
1194
119
      return nullptr;
1195
119
    }
1196
1197
133
    static const Stmt *getLastStmt(const CFGBlock &B) {
1198
133
      if (const Stmt *Term = B.getTerminatorStmt())
1199
18
        return Term;
1200
115
      for (CFGBlock::const_reverse_iterator ElemIt = B.rbegin(),
1201
115
                                            ElemEnd = B.rend();
1202
115
                                            ElemIt != ElemEnd; ++ElemIt) {
1203
109
        if (Optional<CFGStmt> CS = ElemIt->getAs<CFGStmt>())
1204
109
          return CS->getStmt();
1205
109
      }
1206
115
      // Workaround to detect a statement thrown out by CFGBuilder:
1207
115
      //   case X: {} case Y:
1208
115
      //   case X: ; case Y:
1209
115
      if (const SwitchCase *SW = dyn_cast_or_null<SwitchCase>(B.getLabel()))
1210
4
        if (!isa<SwitchCase>(SW->getSubStmt()))
1211
4
          return SW->getSubStmt();
1212
2
1213
2
      return nullptr;
1214
2
    }
1215
1216
93
    bool isFollowedByFallThroughComment(const Stmt *Statement) {
1217
93
      // Try to detect whether the fallthrough is marked by a comment like
1218
93
      // /*FALLTHROUGH*/.
1219
93
      bool Invalid;
1220
93
      const char *SourceData = S.getSourceManager().getCharacterData(
1221
93
          Statement->getEndLoc(), &Invalid);
1222
93
      if (Invalid)
1223
0
        return false;
1224
93
      const char *LineStart = SourceData;
1225
100
      for (;;) {
1226
100
        LineStart = strchr(LineStart, '\n');
1227
100
        if (LineStart == nullptr)
1228
0
          return false;
1229
100
        ++LineStart; // Start of next line.
1230
100
        const char *LineEnd = strchr(LineStart, '\n');
1231
100
        StringRef Line(LineStart,
1232
100
                       LineEnd ? LineEnd - LineStart : strlen(LineStart));
1233
100
        if (LineStart == LineEnd ||
1234
100
            Line.find_first_not_of(" \t\r") == StringRef::npos)
1235
7
          continue; // Whitespace-only line.
1236
93
        if (!FallthroughRegex.isValid())
1237
56
          FallthroughRegex =
1238
56
              llvm::Regex("(/\\*[ \\t]*fall(s | |-)?thr(ough|u)\\.?[ \\t]*\\*/)"
1239
56
                          "|(//[ \\t]*fall(s | |-)?thr(ough|u)\\.?[ \\t]*)",
1240
56
                          llvm::Regex::IgnoreCase);
1241
93
        assert(FallthroughRegex.isValid());
1242
93
        return FallthroughRegex.match(Line);
1243
93
      }
1244
93
    }
1245
1246
    bool FoundSwitchStatements;
1247
    AttrStmts FallthroughStmts;
1248
    Sema &S;
1249
    llvm::SmallPtrSet<const CFGBlock *, 16> ReachableBlocks;
1250
    llvm::Regex FallthroughRegex;
1251
  };
1252
} // anonymous namespace
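Editor's illustrative sketch (not part of the original file): a fall-through marked only by a comment matching FallthroughRegex above counts as annotated, while the unmarked one is reported (assuming -Wimplicit-fallthrough is enabled). Names are hypothetical.

    int fallthrough_comment_demo(int n, int r) {
      switch (n) {
      case 0:
        r += 1;
        // fall through
      case 1:        // preceding comment matches the regex: not diagnosed
        r += 2;
      case 2:        // warn_unannotated_fallthrough
        r += 3;
        break;
      }
      return r;
    }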
1253
1254
static StringRef getFallthroughAttrSpelling(Preprocessor &PP,
1255
66
                                            SourceLocation Loc) {
1256
66
  TokenValue FallthroughTokens[] = {
1257
66
    tok::l_square, tok::l_square,
1258
66
    PP.getIdentifierInfo("fallthrough"),
1259
66
    tok::r_square, tok::r_square
1260
66
  };
1261
66
1262
66
  TokenValue ClangFallthroughTokens[] = {
1263
66
    tok::l_square, tok::l_square, PP.getIdentifierInfo("clang"),
1264
66
    tok::coloncolon, PP.getIdentifierInfo("fallthrough"),
1265
66
    tok::r_square, tok::r_square
1266
66
  };
1267
66
1268
66
  bool PreferClangAttr = !PP.getLangOpts().CPlusPlus17 && !PP.getLangOpts().C2x;
1269
66
1270
66
  StringRef MacroName;
1271
66
  if (PreferClangAttr)
1272
39
    MacroName = PP.getLastMacroWithSpelling(Loc, ClangFallthroughTokens);
1273
66
  if (MacroName.empty())
1274
59
    MacroName = PP.getLastMacroWithSpelling(Loc, FallthroughTokens);
1275
66
  if (MacroName.empty() && !PreferClangAttr)
1276
22
    MacroName = PP.getLastMacroWithSpelling(Loc, ClangFallthroughTokens);
1277
66
  if (MacroName.empty()) {
1278
41
    if (!PreferClangAttr)
1279
12
      MacroName = "[[fallthrough]]";
1280
29
    else if (PP.getLangOpts().CPlusPlus)
1281
21
      MacroName = "[[clang::fallthrough]]";
1282
8
    else
1283
8
      MacroName = "__attribute__((fallthrough))";
1284
41
  }
1285
66
  return MacroName;
1286
66
}
1287
1288
static void DiagnoseSwitchLabelsFallthrough(Sema &S, AnalysisDeclContext &AC,
1289
119
                                            bool PerFunction) {
1290
119
  FallthroughMapper FM(S);
1291
119
  FM.TraverseStmt(AC.getBody());
1292
119
1293
119
  if (!FM.foundSwitchStatements())
1294
42
    return;
1295
77
1296
77
  if (PerFunction && FM.getFallthroughStmts().empty())
1297
1
    return;
1298
76
1299
76
  CFG *Cfg = AC.getCFG();
1300
76
1301
76
  if (!Cfg)
1302
0
    return;
1303
76
1304
76
  FM.fillReachableBlocks(Cfg);
1305
76
1306
650
  for (const CFGBlock *B : llvm::reverse(*Cfg)) {
1307
650
    const Stmt *Label = B->getLabel();
1308
650
1309
650
    if (!Label || !isa<SwitchCase>(Label))
1310
387
      continue;
1311
263
1312
263
    int AnnotatedCnt;
1313
263
1314
263
    bool IsTemplateInstantiation = false;
1315
263
    if (const FunctionDecl *Function = dyn_cast<FunctionDecl>(AC.getDecl()))
1316
260
      IsTemplateInstantiation = Function->isTemplateInstantiation();
1317
263
    if (!FM.checkFallThroughIntoBlock(*B, AnnotatedCnt,
1318
263
                                      IsTemplateInstantiation))
1319
180
      continue;
1320
83
1321
83
    S.Diag(Label->getBeginLoc(),
1322
83
           PerFunction ? diag::warn_unannotated_fallthrough_per_function
1323
83
                       : diag::warn_unannotated_fallthrough);
1324
83
1325
83
    if (!AnnotatedCnt) {
1326
82
      SourceLocation L = Label->getBeginLoc();
1327
82
      if (L.isMacroID())
1328
10
        continue;
1329
72
1330
72
      const Stmt *Term = B->getTerminatorStmt();
1331
72
      // Skip empty cases.
1332
116
      while (B->empty() && 
!Term50
&&
B->succ_size() == 144
) {
1333
44
        B = *B->succ_begin();
1334
44
        Term = B->getTerminatorStmt();
1335
44
      }
1336
72
      if (!(B->empty() && 
Term6
&&
isa<BreakStmt>(Term)6
)) {
1337
66
        Preprocessor &PP = S.getPreprocessor();
1338
66
        StringRef AnnotationSpelling = getFallthroughAttrSpelling(PP, L);
1339
66
        SmallString<64> TextToInsert(AnnotationSpelling);
1340
66
        TextToInsert += "; ";
1341
66
        S.Diag(L, diag::note_insert_fallthrough_fixit)
1342
66
            << AnnotationSpelling
1343
66
            << FixItHint::CreateInsertion(L, TextToInsert);
1344
66
      }
1345
72
      S.Diag(L, diag::note_insert_break_fixit)
1346
72
          << FixItHint::CreateInsertion(L, "break; ");
1347
72
    }
1348
83
  }
1349
76
1350
76
  for (const auto *F : FM.getFallthroughStmts())
1351
16
    S.Diag(F->getBeginLoc(), diag::err_fallthrough_attr_invalid_placement);
1352
76
}
1353
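A hedged sketch of the diagnostic this routine drives (illustrative, not part of the measured source): with -Wimplicit-fallthrough enabled, falling from a non-empty case into the next case label is reported at that label (warn_unannotated_fallthrough), together with fix-it notes offering the spelling from getFallthroughAttrSpelling or a 'break;':

  int classify(int v) {
    int r = 0;
    switch (v) {
    case 0:
      r = 1;             // control falls into 'case 1'
    case 1:              // <- warning reported here, with note_insert_fallthrough_fixit
      r = 2;             //    and note_insert_break_fixit attached
      [[fallthrough]];   // annotated: falling into 'case 2' is accepted
    case 2:
      r = 3;
      break;
    default:
      break;
    }
    return r;
  }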
1354
static bool isInLoop(const ASTContext &Ctx, const ParentMap &PM,
1355
68
                     const Stmt *S) {
1356
68
  assert(S);
1357
68
1358
410
  do {
1359
410
    switch (S->getStmtClass()) {
1360
18
    case Stmt::ForStmtClass:
1361
18
    case Stmt::WhileStmtClass:
1362
18
    case Stmt::CXXForRangeStmtClass:
1363
18
    case Stmt::ObjCForCollectionStmtClass:
1364
18
      return true;
1365
18
    case Stmt::DoStmtClass: {
1366
4
      Expr::EvalResult Result;
1367
4
      if (!cast<DoStmt>(S)->getCond()->EvaluateAsInt(Result, Ctx))
1368
2
        return true;
1369
2
      return Result.Val.getInt().getBoolValue();
1370
2
    }
1371
388
    default:
1372
388
      break;
1373
388
    }
1374
388
  } while ((S = PM.getParent(S)));
1375
68
1376
68
  
return false46
;
1377
68
}
1378
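A small illustration of isInLoop above (not part of the measured source): the walk up the ParentMap treats for/while/range-for/ObjC-for statements as loops, while a do/while whose condition constant-folds to false is deliberately not counted:

  void demo(bool cond) {
    int x = 0;
    while (cond) {
      ++x;             // enclosing WhileStmt -> isInLoop returns true
    }
    do {
      ++x;             // DoStmt condition evaluates to 0 -> not treated as a loop
    } while (0);
    do {
      ++x;             // condition cannot be folded -> conservatively a loop
    } while (cond);
  }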
1379
static void diagnoseRepeatedUseOfWeak(Sema &S,
1380
                                      const sema::FunctionScopeInfo *CurFn,
1381
                                      const Decl *D,
1382
111
                                      const ParentMap &PM) {
1383
111
  typedef sema::FunctionScopeInfo::WeakObjectProfileTy WeakObjectProfileTy;
1384
111
  typedef sema::FunctionScopeInfo::WeakObjectUseMap WeakObjectUseMap;
1385
111
  typedef sema::FunctionScopeInfo::WeakUseVector WeakUseVector;
1386
111
  typedef std::pair<const Stmt *, WeakObjectUseMap::const_iterator>
1387
111
  StmtUsesPair;
1388
111
1389
111
  ASTContext &Ctx = S.getASTContext();
1390
111
1391
111
  const WeakObjectUseMap &WeakMap = CurFn->getWeakObjectUses();
1392
111
1393
111
  // Extract all weak objects that are referenced more than once.
1394
111
  SmallVector<StmtUsesPair, 8> UsesByStmt;
1395
111
  for (WeakObjectUseMap::const_iterator I = WeakMap.begin(), E = WeakMap.end();
1396
265
       I != E; 
++I154
) {
1397
154
    const WeakUseVector &Uses = I->second;
1398
154
1399
154
    // Find the first read of the weak object.
1400
154
    WeakUseVector::const_iterator UI = Uses.begin(), UE = Uses.end();
1401
216
    for ( ; UI != UE; 
++UI62
) {
1402
196
      if (UI->isUnsafe())
1403
134
        break;
1404
196
    }
1405
154
1406
154
    // If there were only writes to this object, don't warn.
1407
154
    if (UI == UE)
1408
20
      continue;
1409
134
1410
134
    // If there was only one read, followed by any number of writes, and the
1411
134
    // read is not within a loop, don't warn. Additionally, don't warn in a
1412
134
    // loop if the base object is a local variable -- local variables are often
1413
134
    // changed in loops.
1414
134
    if (UI == Uses.begin()) {
1415
116
      WeakUseVector::const_iterator UI2 = UI;
1416
142
      for (++UI2; UI2 != UE; 
++UI226
)
1417
74
        if (UI2->isUnsafe())
1418
48
          break;
1419
116
1420
116
      if (UI2 == UE) {
1421
68
        if (!isInLoop(Ctx, PM, UI->getUseExpr()))
1422
48
          continue;
1423
20
1424
20
        const WeakObjectProfileTy &Profile = I->first;
1425
20
        if (!Profile.isExactProfile())
1426
2
          continue;
1427
18
1428
18
        const NamedDecl *Base = Profile.getBase();
1429
18
        if (!Base)
1430
2
          Base = Profile.getProperty();
1431
18
        assert(Base && "A profile always has a base or property.");
1432
18
1433
18
        if (const VarDecl *BaseVar = dyn_cast<VarDecl>(Base))
1434
18
          if (BaseVar->hasLocalStorage() && 
!isa<ParmVarDecl>(Base)16
)
1435
2
            continue;
1436
82
      }
1437
116
    }
1438
82
1439
82
    UsesByStmt.push_back(StmtUsesPair(UI->getUseExpr(), I));
1440
82
  }
1441
111
1442
111
  if (UsesByStmt.empty())
1443
45
    return;
1444
66
1445
66
  // Sort by first use so that we emit the warnings in a deterministic order.
1446
66
  SourceManager &SM = S.getSourceManager();
1447
66
  llvm::sort(UsesByStmt,
1448
66
             [&SM](const StmtUsesPair &LHS, const StmtUsesPair &RHS) {
1449
24
               return SM.isBeforeInTranslationUnit(LHS.first->getBeginLoc(),
1450
24
                                                   RHS.first->getBeginLoc());
1451
24
             });
1452
66
1453
66
  // Classify the current code body for better warning text.
1454
66
  // This enum should stay in sync with the cases in
1455
66
  // warn_arc_repeated_use_of_weak and warn_arc_possible_repeated_use_of_weak.
1456
66
  // FIXME: Should we use a common classification enum and the same set of
1457
66
  // possibilities all throughout Sema?
1458
66
  enum {
1459
66
    Function,
1460
66
    Method,
1461
66
    Block,
1462
66
    Lambda
1463
66
  } FunctionKind;
1464
66
1465
66
  if (isa<sema::BlockScopeInfo>(CurFn))
1466
2
    FunctionKind = Block;
1467
64
  else if (isa<sema::LambdaScopeInfo>(CurFn))
1468
0
    FunctionKind = Lambda;
1469
64
  else if (isa<ObjCMethodDecl>(D))
1470
6
    FunctionKind = Method;
1471
58
  else
1472
58
    FunctionKind = Function;
1473
66
1474
66
  // Iterate through the sorted problems and emit warnings for each.
1475
82
  for (const auto &P : UsesByStmt) {
1476
82
    const Stmt *FirstRead = P.first;
1477
82
    const WeakObjectProfileTy &Key = P.second->first;
1478
82
    const WeakUseVector &Uses = P.second->second;
1479
82
1480
82
    // For complicated expressions like 'a.b.c' and 'x.b.c', WeakObjectProfileTy
1481
82
    // may not contain enough information to determine that these are different
1482
82
    // properties. We can only be 100% sure of a repeated use in certain cases,
1483
82
    // and we adjust the diagnostic kind accordingly so that the less certain
1484
82
    // case can be turned off if it is too noisy.
1485
82
    unsigned DiagKind;
1486
82
    if (Key.isExactProfile())
1487
68
      DiagKind = diag::warn_arc_repeated_use_of_weak;
1488
14
    else
1489
14
      DiagKind = diag::warn_arc_possible_repeated_use_of_weak;
1490
82
1491
82
    // Classify the weak object being accessed for better warning text.
1492
82
    // This enum should stay in sync with the cases in
1493
82
    // warn_arc_repeated_use_of_weak and warn_arc_possible_repeated_use_of_weak.
1494
82
    enum {
1495
82
      Variable,
1496
82
      Property,
1497
82
      ImplicitProperty,
1498
82
      Ivar
1499
82
    } ObjectKind;
1500
82
1501
82
    const NamedDecl *KeyProp = Key.getProperty();
1502
82
    if (isa<VarDecl>(KeyProp))
1503
6
      ObjectKind = Variable;
1504
76
    else if (isa<ObjCPropertyDecl>(KeyProp))
1505
64
      ObjectKind = Property;
1506
12
    else if (isa<ObjCMethodDecl>(KeyProp))
1507
4
      ObjectKind = ImplicitProperty;
1508
8
    else if (isa<ObjCIvarDecl>(KeyProp))
1509
8
      ObjectKind = Ivar;
1510
8
    else
1511
8
      
llvm_unreachable0
("Unexpected weak object kind!");
1512
82
1513
82
    // Do not warn about IBOutlet weak property receivers being set to null
1514
82
    // since they are typically only used from the main thread.
1515
82
    if (const ObjCPropertyDecl *Prop = dyn_cast<ObjCPropertyDecl>(KeyProp))
1516
64
      if (Prop->hasAttr<IBOutletAttr>())
1517
4
        continue;
1518
78
1519
78
    // Show the first time the object was read.
1520
78
    S.Diag(FirstRead->getBeginLoc(), DiagKind)
1521
78
        << int(ObjectKind) << KeyProp << int(FunctionKind)
1522
78
        << FirstRead->getSourceRange();
1523
78
1524
78
    // Print all the other accesses as notes.
1525
164
    for (const auto &Use : Uses) {
1526
164
      if (Use.getUseExpr() == FirstRead)
1527
78
        continue;
1528
86
      S.Diag(Use.getUseExpr()->getBeginLoc(),
1529
86
             diag::note_arc_weak_also_accessed_here)
1530
86
          << Use.getUseExpr()->getSourceRange();
1531
86
    }
1532
78
  }
1533
66
}
1534
1535
namespace {
1536
class UninitValsDiagReporter : public UninitVariablesHandler {
1537
  Sema &S;
1538
  typedef SmallVector<UninitUse, 2> UsesVec;
1539
  typedef llvm::PointerIntPair<UsesVec *, 1, bool> MappedType;
1540
  // Prefer using MapVector to DenseMap, so that iteration order will be
1541
  // the same as insertion order. This is needed to obtain a deterministic
1542
  // order of diagnostics when calling flushDiagnostics().
1543
  typedef llvm::MapVector<const VarDecl *, MappedType> UsesMap;
1544
  UsesMap uses;
1545
1546
public:
1547
21.7k
  UninitValsDiagReporter(Sema &S) : S(S) {}
1548
21.7k
  ~UninitValsDiagReporter() override { flushDiagnostics(); }
1549
1550
1.42k
  MappedType &getUses(const VarDecl *vd) {
1551
1.42k
    MappedType &V = uses[vd];
1552
1.42k
    if (!V.getPointer())
1553
795
      V.setPointer(new UsesVec());
1554
1.42k
    return V;
1555
1.42k
  }
1556
1557
  void handleUseOfUninitVariable(const VarDecl *vd,
1558
1.41k
                                 const UninitUse &use) override {
1559
1.41k
    getUses(vd).getPointer()->push_back(use);
1560
1.41k
  }
1561
1562
8
  void handleSelfInit(const VarDecl *vd) override {
1563
8
    getUses(vd).setInt(true);
1564
8
  }
1565
1566
21.7k
  void flushDiagnostics() {
1567
21.7k
    for (const auto &P : uses) {
1568
795
      const VarDecl *vd = P.first;
1569
795
      const MappedType &V = P.second;
1570
795
1571
795
      UsesVec *vec = V.getPointer();
1572
795
      bool hasSelfInit = V.getInt();
1573
795
1574
795
      // Specially handle the case where we have uses of an uninitialized
1575
795
      // variable, but the root cause is an idiomatic self-init.  We want
1576
795
      // to report the diagnostic at the self-init since that is the root cause.
1577
795
      if (!vec->empty() && 
hasSelfInit791
&&
hasAlwaysUninitializedUse(vec)4
)
1578
2
        DiagnoseUninitializedUse(S, vd,
1579
2
                                 UninitUse(vd->getInit()->IgnoreParenCasts(),
1580
2
                                           /* isAlwaysUninit */ true),
1581
2
                                 /* alwaysReportSelfInit */ true);
1582
793
      else {
1583
793
        // Sort the uses by their SourceLocations.  While not strictly
1584
793
        // guaranteed to produce them in line/column order, this will provide
1585
793
        // a stable ordering.
1586
793
        llvm::sort(vec->begin(), vec->end(),
1587
793
                   [](const UninitUse &a, const UninitUse &b) {
1588
628
          // Prefer a more confident report over a less confident one.
1589
628
          if (a.getKind() != b.getKind())
1590
2
            return a.getKind() > b.getKind();
1591
626
          return a.getUser()->getBeginLoc() < b.getUser()->getBeginLoc();
1592
626
        });
1593
793
1594
793
        for (const auto &U : *vec) {
1595
789
          // If we have self-init, downgrade all uses to 'may be uninitialized'.
1596
789
          UninitUse Use = hasSelfInit ? 
UninitUse(U.getUser(), false)2
:
U787
;
1597
789
1598
789
          if (DiagnoseUninitializedUse(S, vd, Use))
1599
789
            // Skip further diagnostics for this variable. We try to warn only
1600
789
            // on the first point at which a variable is used uninitialized.
1601
789
            break;
1602
789
        }
1603
793
      }
1604
795
1605
795
      // Release the uses vector.
1606
795
      delete vec;
1607
795
    }
1608
21.7k
1609
21.7k
    uses.clear();
1610
21.7k
  }
1611
1612
private:
1613
4
  static bool hasAlwaysUninitializedUse(const UsesVec* vec) {
1614
4
    return std::any_of(vec->begin(), vec->end(), [](const UninitUse &U) {
1615
4
      return U.getKind() == UninitUse::Always ||
1616
4
             
U.getKind() == UninitUse::AfterCall2
||
1617
4
             
U.getKind() == UninitUse::AfterDecl2
;
1618
4
    });
1619
4
  }
1620
};
1621
} // anonymous namespace
1622
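For context, a hedged sketch of code that UninitValsDiagReporter above ends up reporting under -Wuninitialized and the related sometimes/maybe-uninitialized diagnostics referenced later in this file (illustrative, not part of the measured source). When a variable uses the idiomatic self-init and some use is definitely uninitialized, the diagnostic is placed on the initializer itself; otherwise the remaining uses are downgraded to "may be uninitialized":

  int always_uninit() {
    int x;
    return x;          // definite use of an uninitialized variable
  }

  int sometimes_uninit(bool b) {
    int x;
    if (b)
      x = 1;
    return x;          // uninitialized only on the 'b == false' path
  }

  int self_init() {
    int x = x;         // idiomatic self-init handled by handleSelfInit above
    return x;
  }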
1623
namespace clang {
1624
namespace {
1625
typedef SmallVector<PartialDiagnosticAt, 1> OptionalNotes;
1626
typedef std::pair<PartialDiagnosticAt, OptionalNotes> DelayedDiag;
1627
typedef std::list<DelayedDiag> DiagList;
1628
1629
struct SortDiagBySourceLocation {
1630
  SourceManager &SM;
1631
2.23k
  SortDiagBySourceLocation(SourceManager &SM) : SM(SM) {}
1632
1633
2.20k
  bool operator()(const DelayedDiag &left, const DelayedDiag &right) {
1634
2.20k
    // Although this call will be slow, this is only called when outputting
1635
2.20k
    // multiple warnings.
1636
2.20k
    return SM.isBeforeInTranslationUnit(left.first.first, right.first.first);
1637
2.20k
  }
1638
};
1639
} // anonymous namespace
1640
} // namespace clang
1641
1642
//===----------------------------------------------------------------------===//
1643
// -Wthread-safety
1644
//===----------------------------------------------------------------------===//
1645
namespace clang {
1646
namespace threadSafety {
1647
namespace {
1648
class ThreadSafetyReporter : public clang::threadSafety::ThreadSafetyHandler {
1649
  Sema &S;
1650
  DiagList Warnings;
1651
  SourceLocation FunLocation, FunEndLocation;
1652
1653
  const FunctionDecl *CurrentFunction;
1654
  bool Verbose;
1655
1656
2.01k
  OptionalNotes getNotes() const {
1657
2.01k
    if (Verbose && 
CurrentFunction14
) {
1658
14
      PartialDiagnosticAt FNote(CurrentFunction->getBody()->getBeginLoc(),
1659
14
                                S.PDiag(diag::note_thread_warning_in_fun)
1660
14
                                    << CurrentFunction);
1661
14
      return OptionalNotes(1, FNote);
1662
14
    }
1663
2.00k
    return OptionalNotes();
1664
2.00k
  }
1665
1666
376
  OptionalNotes getNotes(const PartialDiagnosticAt &Note) const {
1667
376
    OptionalNotes ONS(1, Note);
1668
376
    if (Verbose && 
CurrentFunction6
) {
1669
6
      PartialDiagnosticAt FNote(CurrentFunction->getBody()->getBeginLoc(),
1670
6
                                S.PDiag(diag::note_thread_warning_in_fun)
1671
6
                                    << CurrentFunction);
1672
6
      ONS.push_back(std::move(FNote));
1673
6
    }
1674
376
    return ONS;
1675
376
  }
1676
1677
  OptionalNotes getNotes(const PartialDiagnosticAt &Note1,
1678
0
                         const PartialDiagnosticAt &Note2) const {
1679
0
    OptionalNotes ONS;
1680
0
    ONS.push_back(Note1);
1681
0
    ONS.push_back(Note2);
1682
0
    if (Verbose && CurrentFunction) {
1683
0
      PartialDiagnosticAt FNote(CurrentFunction->getBody()->getBeginLoc(),
1684
0
                                S.PDiag(diag::note_thread_warning_in_fun)
1685
0
                                    << CurrentFunction);
1686
0
      ONS.push_back(std::move(FNote));
1687
0
    }
1688
0
    return ONS;
1689
0
  }
1690
1691
274
  OptionalNotes makeLockedHereNote(SourceLocation LocLocked, StringRef Kind) {
1692
274
    return LocLocked.isValid()
1693
274
               ? getNotes(PartialDiagnosticAt(
1694
274
                     LocLocked, S.PDiag(diag::note_locked_here) << Kind))
1695
274
               : 
getNotes()0
;
1696
274
  }
1697
1698
 public:
1699
  ThreadSafetyReporter(Sema &S, SourceLocation FL, SourceLocation FEL)
1700
    : S(S), FunLocation(FL), FunEndLocation(FEL),
1701
2.14k
      CurrentFunction(nullptr), Verbose(false) {}
1702
1703
40
  void setVerbose(bool b) { Verbose = b; }
1704
1705
  /// Emit all buffered diagnostics in order of source location.
1706
  /// We need to output diagnostics produced while iterating through
1707
  /// the lockset in deterministic order, so this function orders diagnostics
1708
  /// and outputs them.
1709
2.14k
  void emitDiagnostics() {
1710
2.14k
    Warnings.sort(SortDiagBySourceLocation(S.getSourceManager()));
1711
2.39k
    for (const auto &Diag : Warnings) {
1712
2.39k
      S.Diag(Diag.first.first, Diag.first.second);
1713
2.39k
      for (const auto &Note : Diag.second)
1714
396
        S.Diag(Note.first, Note.second);
1715
2.39k
    }
1716
2.14k
  }
1717
1718
0
  void handleInvalidLockExp(StringRef Kind, SourceLocation Loc) override {
1719
0
    PartialDiagnosticAt Warning(Loc, S.PDiag(diag::warn_cannot_resolve_lock)
1720
0
                                         << Loc);
1721
0
    Warnings.emplace_back(std::move(Warning), getNotes());
1722
0
  }
1723
1724
  void handleUnmatchedUnlock(StringRef Kind, Name LockName,
1725
100
                             SourceLocation Loc) override {
1726
100
    if (Loc.isInvalid())
1727
0
      Loc = FunLocation;
1728
100
    PartialDiagnosticAt Warning(Loc, S.PDiag(diag::warn_unlock_but_no_lock)
1729
100
                                         << Kind << LockName);
1730
100
    Warnings.emplace_back(std::move(Warning), getNotes());
1731
100
  }
1732
1733
  void handleIncorrectUnlockKind(StringRef Kind, Name LockName,
1734
                                 LockKind Expected, LockKind Received,
1735
                                 SourceLocation LocLocked,
1736
18
                                 SourceLocation LocUnlock) override {
1737
18
    if (LocUnlock.isInvalid())
1738
0
      LocUnlock = FunLocation;
1739
18
    PartialDiagnosticAt Warning(
1740
18
        LocUnlock, S.PDiag(diag::warn_unlock_kind_mismatch)
1741
18
                       << Kind << LockName << Received << Expected);
1742
18
    Warnings.emplace_back(std::move(Warning),
1743
18
                          makeLockedHereNote(LocLocked, Kind));
1744
18
  }
1745
1746
  void handleDoubleLock(StringRef Kind, Name LockName, SourceLocation LocLocked,
1747
79
                        SourceLocation LocDoubleLock) override {
1748
79
    if (LocDoubleLock.isInvalid())
1749
0
      LocDoubleLock = FunLocation;
1750
79
    PartialDiagnosticAt Warning(LocDoubleLock, S.PDiag(diag::warn_double_lock)
1751
79
                                                   << Kind << LockName);
1752
79
    Warnings.emplace_back(std::move(Warning),
1753
79
                          makeLockedHereNote(LocLocked, Kind));
1754
79
  }
1755
1756
  void handleMutexHeldEndOfScope(StringRef Kind, Name LockName,
1757
                                 SourceLocation LocLocked,
1758
                                 SourceLocation LocEndOfScope,
1759
177
                                 LockErrorKind LEK) override {
1760
177
    unsigned DiagID = 0;
1761
177
    switch (LEK) {
1762
46
      case LEK_LockedSomePredecessors:
1763
46
        DiagID = diag::warn_lock_some_predecessors;
1764
46
        break;
1765
34
      case LEK_LockedSomeLoopIterations:
1766
34
        DiagID = diag::warn_expecting_lock_held_on_loop;
1767
34
        break;
1768
65
      case LEK_LockedAtEndOfFunction:
1769
65
        DiagID = diag::warn_no_unlock;
1770
65
        break;
1771
32
      case LEK_NotLockedAtEndOfFunction:
1772
32
        DiagID = diag::warn_expecting_locked;
1773
32
        break;
1774
177
    }
1775
177
    if (LocEndOfScope.isInvalid())
1776
127
      LocEndOfScope = FunEndLocation;
1777
177
1778
177
    PartialDiagnosticAt Warning(LocEndOfScope, S.PDiag(DiagID) << Kind
1779
177
                                                               << LockName);
1780
177
    Warnings.emplace_back(std::move(Warning),
1781
177
                          makeLockedHereNote(LocLocked, Kind));
1782
177
  }
1783
1784
  void handleExclusiveAndShared(StringRef Kind, Name LockName,
1785
                                SourceLocation Loc1,
1786
20
                                SourceLocation Loc2) override {
1787
20
    PartialDiagnosticAt Warning(Loc1,
1788
20
                                S.PDiag(diag::warn_lock_exclusive_and_shared)
1789
20
                                    << Kind << LockName);
1790
20
    PartialDiagnosticAt Note(Loc2, S.PDiag(diag::note_lock_exclusive_and_shared)
1791
20
                                       << Kind << LockName);
1792
20
    Warnings.emplace_back(std::move(Warning), getNotes(Note));
1793
20
  }
1794
1795
  void handleNoMutexHeld(StringRef Kind, const NamedDecl *D,
1796
                         ProtectedOperationKind POK, AccessKind AK,
1797
34
                         SourceLocation Loc) override {
1798
34
    assert((POK == POK_VarAccess || POK == POK_VarDereference) &&
1799
34
           "Only works for variables");
1800
34
    unsigned DiagID = POK == POK_VarAccess?
1801
25
                        diag::warn_variable_requires_any_lock:
1802
34
                        
diag::warn_var_deref_requires_any_lock9
;
1803
34
    PartialDiagnosticAt Warning(Loc, S.PDiag(DiagID)
1804
34
      << D << getLockKindFromAccessKind(AK));
1805
34
    Warnings.emplace_back(std::move(Warning), getNotes());
1806
34
  }
1807
1808
  void handleMutexNotHeld(StringRef Kind, const NamedDecl *D,
1809
                          ProtectedOperationKind POK, Name LockName,
1810
                          LockKind LK, SourceLocation Loc,
1811
1.17k
                          Name *PossibleMatch) override {
1812
1.17k
    unsigned DiagID = 0;
1813
1.17k
    if (PossibleMatch) {
1814
76
      switch (POK) {
1815
56
        case POK_VarAccess:
1816
56
          DiagID = diag::warn_variable_requires_lock_precise;
1817
56
          break;
1818
0
        case POK_VarDereference:
1819
0
          DiagID = diag::warn_var_deref_requires_lock_precise;
1820
0
          break;
1821
20
        case POK_FunctionCall:
1822
20
          DiagID = diag::warn_fun_requires_lock_precise;
1823
20
          break;
1824
0
        case POK_PassByRef:
1825
0
          DiagID = diag::warn_guarded_pass_by_reference;
1826
0
          break;
1827
0
        case POK_PtPassByRef:
1828
0
          DiagID = diag::warn_pt_guarded_pass_by_reference;
1829
0
          break;
1830
76
      }
1831
76
      PartialDiagnosticAt Warning(Loc, S.PDiag(DiagID) << Kind
1832
76
                                                       << D
1833
76
                                                       << LockName << LK);
1834
76
      PartialDiagnosticAt Note(Loc, S.PDiag(diag::note_found_mutex_near_match)
1835
76
                                        << *PossibleMatch);
1836
76
      if (Verbose && 
POK == POK_VarAccess0
) {
1837
0
        PartialDiagnosticAt VNote(D->getLocation(),
1838
0
                                 S.PDiag(diag::note_guarded_by_declared_here)
1839
0
                                     << D->getNameAsString());
1840
0
        Warnings.emplace_back(std::move(Warning), getNotes(Note, VNote));
1841
0
      } else
1842
76
        Warnings.emplace_back(std::move(Warning), getNotes(Note));
1843
1.09k
    } else {
1844
1.09k
      switch (POK) {
1845
574
        case POK_VarAccess:
1846
574
          DiagID = diag::warn_variable_requires_lock;
1847
574
          break;
1848
204
        case POK_VarDereference:
1849
204
          DiagID = diag::warn_var_deref_requires_lock;
1850
204
          break;
1851
212
        case POK_FunctionCall:
1852
212
          DiagID = diag::warn_fun_requires_lock;
1853
212
          break;
1854
88
        case POK_PassByRef:
1855
88
          DiagID = diag::warn_guarded_pass_by_reference;
1856
88
          break;
1857
20
        case POK_PtPassByRef:
1858
20
          DiagID = diag::warn_pt_guarded_pass_by_reference;
1859
20
          break;
1860
1.09k
      }
1861
1.09k
      PartialDiagnosticAt Warning(Loc, S.PDiag(DiagID) << Kind
1862
1.09k
                                                       << D
1863
1.09k
                                                       << LockName << LK);
1864
1.09k
      if (Verbose && 
POK == POK_VarAccess12
) {
1865
6
        PartialDiagnosticAt Note(D->getLocation(),
1866
6
                                 S.PDiag(diag::note_guarded_by_declared_here));
1867
6
        Warnings.emplace_back(std::move(Warning), getNotes(Note));
1868
6
      } else
1869
1.09k
        Warnings.emplace_back(std::move(Warning), getNotes());
1870
1.09k
    }
1871
1.17k
  }
1872
1873
  void handleNegativeNotHeld(StringRef Kind, Name LockName, Name Neg,
1874
643
                             SourceLocation Loc) override {
1875
643
    PartialDiagnosticAt Warning(Loc,
1876
643
        S.PDiag(diag::warn_acquire_requires_negative_cap)
1877
643
        << Kind << LockName << Neg);
1878
643
    Warnings.emplace_back(std::move(Warning), getNotes());
1879
643
  }
1880
1881
  void handleFunExcludesLock(StringRef Kind, Name FunName, Name LockName,
1882
77
                             SourceLocation Loc) override {
1883
77
    PartialDiagnosticAt Warning(Loc, S.PDiag(diag::warn_fun_excludes_mutex)
1884
77
                                         << Kind << FunName << LockName);
1885
77
    Warnings.emplace_back(std::move(Warning), getNotes());
1886
77
  }
1887
1888
  void handleLockAcquiredBefore(StringRef Kind, Name L1Name, Name L2Name,
1889
49
                                SourceLocation Loc) override {
1890
49
    PartialDiagnosticAt Warning(Loc,
1891
49
      S.PDiag(diag::warn_acquired_before) << Kind << L1Name << L2Name);
1892
49
    Warnings.emplace_back(std::move(Warning), getNotes());
1893
49
  }
1894
1895
20
  void handleBeforeAfterCycle(Name L1Name, SourceLocation Loc) override {
1896
20
    PartialDiagnosticAt Warning(Loc,
1897
20
      S.PDiag(diag::warn_acquired_before_after_cycle) << L1Name);
1898
20
    Warnings.emplace_back(std::move(Warning), getNotes());
1899
20
  }
1900
1901
2.02k
  void enterFunction(const FunctionDecl* FD) override {
1902
2.02k
    CurrentFunction = FD;
1903
2.02k
  }
1904
1905
1.96k
  void leaveFunction(const FunctionDecl* FD) override {
1906
1.96k
    CurrentFunction = nullptr;
1907
1.96k
  }
1908
};
1909
} // anonymous namespace
1910
} // namespace threadSafety
1911
} // namespace clang
1912
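For context, a minimal, hedged example of code that exercises the ThreadSafetyReporter callbacks above when compiled with -Wthread-safety (illustrative, not part of the measured source; the attribute spellings are Clang's documented thread-safety annotations, normally hidden behind macros such as GUARDED_BY):

  struct __attribute__((capability("mutex"))) Mutex {
    void lock()   __attribute__((acquire_capability()));
    void unlock() __attribute__((release_capability()));
  };

  Mutex mu;
  int data __attribute__((guarded_by(mu)));

  void bad() {
    data = 1;          // handleMutexNotHeld -> warn_variable_requires_lock
    mu.lock();
    mu.lock();         // handleDoubleLock   -> warn_double_lock
    data = 2;          // fine: 'mu' is held here
  }                    // handleMutexHeldEndOfScope -> warn_no_unlock ('mu' never released)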
1913
//===----------------------------------------------------------------------===//
1914
// -Wconsumed
1915
//===----------------------------------------------------------------------===//
1916
1917
namespace clang {
1918
namespace consumed {
1919
namespace {
1920
class ConsumedWarningsHandler : public ConsumedWarningsHandlerBase {
1921
1922
  Sema &S;
1923
  DiagList Warnings;
1924
1925
public:
1926
1927
96
  ConsumedWarningsHandler(Sema &S) : S(S) {}
1928
1929
93
  void emitDiagnostics() override {
1930
93
    Warnings.sort(SortDiagBySourceLocation(S.getSourceManager()));
1931
110
    for (const auto &Diag : Warnings) {
1932
110
      S.Diag(Diag.first.first, Diag.first.second);
1933
110
      for (const auto &Note : Diag.second)
1934
0
        S.Diag(Note.first, Note.second);
1935
110
    }
1936
93
  }
1937
1938
  void warnLoopStateMismatch(SourceLocation Loc,
1939
2
                             StringRef VariableName) override {
1940
2
    PartialDiagnosticAt Warning(Loc, S.PDiag(diag::warn_loop_state_mismatch) <<
1941
2
      VariableName);
1942
2
1943
2
    Warnings.emplace_back(std::move(Warning), OptionalNotes());
1944
2
  }
1945
1946
  void warnParamReturnTypestateMismatch(SourceLocation Loc,
1947
                                        StringRef VariableName,
1948
                                        StringRef ExpectedState,
1949
2
                                        StringRef ObservedState) override {
1950
2
1951
2
    PartialDiagnosticAt Warning(Loc, S.PDiag(
1952
2
      diag::warn_param_return_typestate_mismatch) << VariableName <<
1953
2
        ExpectedState << ObservedState);
1954
2
1955
2
    Warnings.emplace_back(std::move(Warning), OptionalNotes());
1956
2
  }
1957
1958
  void warnParamTypestateMismatch(SourceLocation Loc, StringRef ExpectedState,
1959
6
                                  StringRef ObservedState) override {
1960
6
1961
6
    PartialDiagnosticAt Warning(Loc, S.PDiag(
1962
6
      diag::warn_param_typestate_mismatch) << ExpectedState << ObservedState);
1963
6
1964
6
    Warnings.emplace_back(std::move(Warning), OptionalNotes());
1965
6
  }
1966
1967
  void warnReturnTypestateForUnconsumableType(SourceLocation Loc,
1968
1
                                              StringRef TypeName) override {
1969
1
    PartialDiagnosticAt Warning(Loc, S.PDiag(
1970
1
      diag::warn_return_typestate_for_unconsumable_type) << TypeName);
1971
1
1972
1
    Warnings.emplace_back(std::move(Warning), OptionalNotes());
1973
1
  }
1974
1975
  void warnReturnTypestateMismatch(SourceLocation Loc, StringRef ExpectedState,
1976
1
                                   StringRef ObservedState) override {
1977
1
1978
1
    PartialDiagnosticAt Warning(Loc, S.PDiag(
1979
1
      diag::warn_return_typestate_mismatch) << ExpectedState << ObservedState);
1980
1
1981
1
    Warnings.emplace_back(std::move(Warning), OptionalNotes());
1982
1
  }
1983
1984
  void warnUseOfTempInInvalidState(StringRef MethodName, StringRef State,
1985
4
                                   SourceLocation Loc) override {
1986
4
1987
4
    PartialDiagnosticAt Warning(Loc, S.PDiag(
1988
4
      diag::warn_use_of_temp_in_invalid_state) << MethodName << State);
1989
4
1990
4
    Warnings.emplace_back(std::move(Warning), OptionalNotes());
1991
4
  }
1992
1993
  void warnUseInInvalidState(StringRef MethodName, StringRef VariableName,
1994
94
                             StringRef State, SourceLocation Loc) override {
1995
94
1996
94
    PartialDiagnosticAt Warning(Loc, S.PDiag(diag::warn_use_in_invalid_state) <<
1997
94
                                MethodName << VariableName << State);
1998
94
1999
94
    Warnings.emplace_back(std::move(Warning), OptionalNotes());
2000
94
  }
2001
};
2002
} // anonymous namespace
2003
} // namespace consumed
2004
} // namespace clang
2005
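Similarly, a hedged sketch of what the ConsumedWarningsHandler above reports under -Wconsumed (illustrative, not part of the measured source; the attribute spellings follow Clang's consumed-analysis annotations and should be treated as an assumption about the exact syntax):

  class __attribute__((consumable(unconsumed))) File {
  public:
    File() __attribute__((return_typestate(unconsumed)));
    void read() __attribute__((callable_when("unconsumed")));
    void close() __attribute__((set_typestate(consumed)));
  };

  void use() {
    File f;            // constructed in the 'unconsumed' state
    f.close();         // set_typestate moves 'f' to 'consumed'
    f.read();          // warnUseInInvalidState -> warn_use_in_invalid_state
  }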
2006
//===----------------------------------------------------------------------===//
2007
// AnalysisBasedWarnings - Worker object used by Sema to execute analysis-based
2008
//  warnings on a function, method, or block.
2009
//===----------------------------------------------------------------------===//
2010
2011
54.4k
clang::sema::AnalysisBasedWarnings::Policy::Policy() {
2012
54.4k
  enableCheckFallThrough = 1;
2013
54.4k
  enableCheckUnreachable = 0;
2014
54.4k
  enableThreadSafetyAnalysis = 0;
2015
54.4k
  enableConsumedAnalysis = 0;
2016
54.4k
}
2017
2018
326k
static unsigned isEnabled(DiagnosticsEngine &D, unsigned diag) {
2019
326k
  return (unsigned)!D.isIgnored(diag, SourceLocation());
2020
326k
}
2021
2022
clang::sema::AnalysisBasedWarnings::AnalysisBasedWarnings(Sema &s)
2023
  : S(s),
2024
    NumFunctionsAnalyzed(0),
2025
    NumFunctionsWithBadCFGs(0),
2026
    NumCFGBlocks(0),
2027
    MaxCFGBlocksPerFunction(0),
2028
    NumUninitAnalysisFunctions(0),
2029
    NumUninitAnalysisVariables(0),
2030
    MaxUninitAnalysisVariablesPerFunction(0),
2031
    NumUninitAnalysisBlockVisits(0),
2032
54.4k
    MaxUninitAnalysisBlockVisitsPerFunction(0) {
2033
54.4k
2034
54.4k
  using namespace diag;
2035
54.4k
  DiagnosticsEngine &D = S.getDiagnostics();
2036
54.4k
2037
54.4k
  DefaultPolicy.enableCheckUnreachable =
2038
54.4k
    isEnabled(D, warn_unreachable) ||
2039
54.4k
    
isEnabled(D, warn_unreachable_break)54.4k
||
2040
54.4k
    
isEnabled(D, warn_unreachable_return)54.4k
||
2041
54.4k
    
isEnabled(D, warn_unreachable_loop_increment)54.4k
;
2042
54.4k
2043
54.4k
  DefaultPolicy.enableThreadSafetyAnalysis =
2044
54.4k
    isEnabled(D, warn_double_lock);
2045
54.4k
2046
54.4k
  DefaultPolicy.enableConsumedAnalysis =
2047
54.4k
    isEnabled(D, warn_use_in_invalid_state);
2048
54.4k
}
2049
2050
509
static void flushDiagnostics(Sema &S, const sema::FunctionScopeInfo *fscope) {
2051
509
  for (const auto &D : fscope->PossiblyUnreachableDiags)
2052
0
    S.Diag(D.Loc, D.PD);
2053
509
}
2054
2055
void clang::sema::
2056
AnalysisBasedWarnings::IssueWarnings(sema::AnalysisBasedWarnings::Policy P,
2057
                                     sema::FunctionScopeInfo *fscope,
2058
2.41M
                                     const Decl *D, QualType BlockType) {
2059
2.41M
2060
2.41M
  // We avoid doing analysis-based warnings when there are errors for
2061
2.41M
  // two reasons:
2062
2.41M
  // (1) The CFGs often can't be constructed (if the body is invalid), so
2063
2.41M
  //     don't bother trying.
2064
2.41M
  // (2) The code already has problems; running the analysis just takes more
2065
2.41M
  //     time.
2066
2.41M
  DiagnosticsEngine &Diags = S.getDiagnostics();
2067
2.41M
2068
2.41M
  // Do not do any analysis if we are going to just ignore them.
2069
2.41M
  if (Diags.getIgnoreAllWarnings() ||
2070
2.41M
      
(2.22M
Diags.getSuppressSystemWarnings()2.22M
&&
2071
2.22M
       
S.SourceMgr.isInSystemHeader(D->getLocation())2.22M
))
2072
2.24M
    return;
2073
170k
2074
170k
  // For code in dependent contexts, we'll do this at instantiation time.
2075
170k
  if (cast<DeclContext>(D)->isDependentContext())
2076
9.19k
    return;
2077
161k
2078
161k
  if (Diags.hasUncompilableErrorOccurred()) {
2079
509
    // Flush out any possibly unreachable diagnostics.
2080
509
    flushDiagnostics(S, fscope);
2081
509
    return;
2082
509
  }
2083
160k
2084
160k
  const Stmt *Body = D->getBody();
2085
160k
  assert(Body);
2086
160k
2087
160k
  // Construct the analysis context with the specified CFG build options.
2088
160k
  AnalysisDeclContext AC(/* AnalysisDeclContextManager */ nullptr, D);
2089
160k
2090
160k
  // Don't generate EH edges for CallExprs as we'd like to avoid the n^2
2091
160k
  // explosion for destructors that can result and the compile time hit.
2092
160k
  AC.getCFGBuildOptions().PruneTriviallyFalseEdges = true;
2093
160k
  AC.getCFGBuildOptions().AddEHEdges = false;
2094
160k
  AC.getCFGBuildOptions().AddInitializers = true;
2095
160k
  AC.getCFGBuildOptions().AddImplicitDtors = true;
2096
160k
  AC.getCFGBuildOptions().AddTemporaryDtors = true;
2097
160k
  AC.getCFGBuildOptions().AddCXXNewAllocator = false;
2098
160k
  AC.getCFGBuildOptions().AddCXXDefaultInitExprInCtors = true;
2099
160k
2100
160k
  // Force that certain expressions appear as CFGElements in the CFG.  This
2101
160k
  // is used to speed up various analyses.
2102
160k
  // FIXME: This isn't the right factoring.  This is here for initial
2103
160k
  // prototyping, but we need a way for analyses to say what expressions they
2104
160k
  // expect to always be CFGElements and then fill in the BuildOptions
2105
160k
  // appropriately.  This is essentially a layering violation.
2106
160k
  if (P.enableCheckUnreachable || 
P.enableThreadSafetyAnalysis160k
||
2107
160k
      
P.enableConsumedAnalysis158k
) {
2108
2.35k
    // Unreachable code analysis and thread safety require a linearized CFG.
2109
2.35k
    AC.getCFGBuildOptions().setAllAlwaysAdd();
2110
2.35k
  }
2111
158k
  else {
2112
158k
    AC.getCFGBuildOptions()
2113
158k
      .setAlwaysAdd(Stmt::BinaryOperatorClass)
2114
158k
      .setAlwaysAdd(Stmt::CompoundAssignOperatorClass)
2115
158k
      .setAlwaysAdd(Stmt::BlockExprClass)
2116
158k
      .setAlwaysAdd(Stmt::CStyleCastExprClass)
2117
158k
      .setAlwaysAdd(Stmt::DeclRefExprClass)
2118
158k
      .setAlwaysAdd(Stmt::ImplicitCastExprClass)
2119
158k
      .setAlwaysAdd(Stmt::UnaryOperatorClass)
2120
158k
      .setAlwaysAdd(Stmt::AttributedStmtClass);
2121
158k
  }
2122
160k
2123
160k
  // Install the logical handler.
2124
160k
  llvm::Optional<LogicalErrorHandler> LEH;
2125
160k
  if (LogicalErrorHandler::hasActiveDiagnostics(Diags, D->getBeginLoc())) {
2126
11.7k
    LEH.emplace(S);
2127
11.7k
    AC.getCFGBuildOptions().Observer = &*LEH;
2128
11.7k
  }
2129
160k
2130
160k
  // Emit delayed diagnostics.
2131
160k
  if (!fscope->PossiblyUnreachableDiags.empty()) {
2132
2.57k
    bool analyzed = false;
2133
2.57k
2134
2.57k
    // Register the expressions with the CFGBuilder.
2135
7.31k
    for (const auto &D : fscope->PossiblyUnreachableDiags) {
2136
7.31k
      for (const Stmt *S : D.Stmts)
2137
7.55k
        AC.registerForcedBlockExpression(S);
2138
7.31k
    }
2139
2.57k
2140
2.57k
    if (AC.getCFG()) {
2141
2.57k
      analyzed = true;
2142
7.31k
      for (const auto &D : fscope->PossiblyUnreachableDiags) {
2143
7.31k
        bool AllReachable = true;
2144
7.55k
        for (const Stmt *S : D.Stmts) {
2145
7.55k
          const CFGBlock *block = AC.getBlockForRegisteredExpression(S);
2146
7.55k
          CFGReverseBlockReachabilityAnalysis *cra =
2147
7.55k
              AC.getCFGReachablityAnalysis();
2148
7.55k
          // FIXME: We should be able to assert that block is non-null, but
2149
7.55k
          // the CFG analysis can skip potentially-evaluated expressions in
2150
7.55k
          // edge cases; see test/Sema/vla-2.c.
2151
7.55k
          if (block && 
cra3.01k
) {
2152
3.01k
            // Can this block be reached from the entrance?
2153
3.01k
            if (!cra->isReachable(&AC.getCFG()->getEntry(), block)) {
2154
69
              AllReachable = false;
2155
69
              break;
2156
69
            }
2157
3.01k
          }
2158
7.55k
          // If we cannot map to a basic block, assume the statement is
2159
7.55k
          // reachable.
2160
7.55k
        }
2161
7.31k
2162
7.31k
        if (AllReachable)
2163
7.24k
          S.Diag(D.Loc, D.PD);
2164
7.31k
      }
2165
2.57k
    }
2166
2.57k
2167
2.57k
    if (!analyzed)
2168
0
      flushDiagnostics(S, fscope);
2169
2.57k
  }
2170
160k
2171
160k
  // Warning: check missing 'return'
2172
160k
  if (P.enableCheckFallThrough) {
2173
155k
    const CheckFallThroughDiagnostics &CD =
2174
155k
        (isa<BlockDecl>(D)
2175
155k
             ? 
CheckFallThroughDiagnostics::MakeForBlock()1.94k
2176
155k
             : 
(153k
isa<CXXMethodDecl>(D)153k
&&
2177
153k
                
cast<CXXMethodDecl>(D)->getOverloadedOperator() == OO_Call39.0k
&&
2178
153k
                
cast<CXXMethodDecl>(D)->getParent()->isLambda()2.70k
)
2179
153k
                   ? 
CheckFallThroughDiagnostics::MakeForLambda()2.63k
2180
153k
                   : (fscope->isCoroutine()
2181
150k
                          ? 
CheckFallThroughDiagnostics::MakeForCoroutine(D)67
2182
150k
                          : 
CheckFallThroughDiagnostics::MakeForFunction(D)150k
));
2183
155k
    CheckFallThroughForBody(S, D, Body, BlockType, CD, AC, fscope);
2184
155k
  }
2185
160k
2186
160k
  // Warning: check for unreachable code
2187
160k
  if (P.enableCheckUnreachable) {
2188
161
    // Only check for unreachable code on non-template instantiations.
2189
161
    // Different template instantiations can effectively change the control-flow
2190
161
    // and it is very difficult to prove that a snippet of code in a template
2191
161
    // is unreachable for all instantiations.
2192
161
    bool isTemplateInstantiation = false;
2193
161
    if (const FunctionDecl *Function = dyn_cast<FunctionDecl>(D))
2194
157
      isTemplateInstantiation = Function->isTemplateInstantiation();
2195
161
    if (!isTemplateInstantiation)
2196
157
      CheckUnreachable(S, AC);
2197
161
  }
2198
160k
2199
160k
  // Check for thread safety violations
2200
160k
  if (P.enableThreadSafetyAnalysis) {
2201
2.14k
    SourceLocation FL = AC.getDecl()->getLocation();
2202
2.14k
    SourceLocation FEL = AC.getDecl()->getEndLoc();
2203
2.14k
    threadSafety::ThreadSafetyReporter Reporter(S, FL, FEL);
2204
2.14k
    if (!Diags.isIgnored(diag::warn_thread_safety_beta, D->getBeginLoc()))
2205
2.06k
      Reporter.setIssueBetaWarnings(true);
2206
2.14k
    if (!Diags.isIgnored(diag::warn_thread_safety_verbose, D->getBeginLoc()))
2207
40
      Reporter.setVerbose(true);
2208
2.14k
2209
2.14k
    threadSafety::runThreadSafetyAnalysis(AC, Reporter,
2210
2.14k
                                          &S.ThreadSafetyDeclCache);
2211
2.14k
    Reporter.emitDiagnostics();
2212
2.14k
  }
2213
160k
2214
160k
  // Check for violations of consumed properties.
2215
160k
  if (P.enableConsumedAnalysis) {
2216
96
    consumed::ConsumedWarningsHandler WarningHandler(S);
2217
96
    consumed::ConsumedAnalyzer Analyzer(WarningHandler);
2218
96
    Analyzer.run(AC);
2219
96
  }
2220
160k
2221
160k
  if (!Diags.isIgnored(diag::warn_uninit_var, D->getBeginLoc()) ||
2222
160k
      
!Diags.isIgnored(diag::warn_sometimes_uninit_var, D->getBeginLoc())139k
||
2223
160k
      
!Diags.isIgnored(diag::warn_maybe_uninit_var, D->getBeginLoc())139k
) {
2224
21.7k
    if (CFG *cfg = AC.getCFG()) {
2225
21.7k
      UninitValsDiagReporter reporter(S);
2226
21.7k
      UninitVariablesAnalysisStats stats;
2227
21.7k
      std::memset(&stats, 0, sizeof(UninitVariablesAnalysisStats));
2228
21.7k
      runUninitializedVariablesAnalysis(*cast<DeclContext>(D), *cfg, AC,
2229
21.7k
                                        reporter, stats);
2230
21.7k
2231
21.7k
      if (S.CollectStats && 
stats.NumVariablesAnalyzed > 00
) {
2232
0
        ++NumUninitAnalysisFunctions;
2233
0
        NumUninitAnalysisVariables += stats.NumVariablesAnalyzed;
2234
0
        NumUninitAnalysisBlockVisits += stats.NumBlockVisits;
2235
0
        MaxUninitAnalysisVariablesPerFunction =
2236
0
            std::max(MaxUninitAnalysisVariablesPerFunction,
2237
0
                     stats.NumVariablesAnalyzed);
2238
0
        MaxUninitAnalysisBlockVisitsPerFunction =
2239
0
            std::max(MaxUninitAnalysisBlockVisitsPerFunction,
2240
0
                     stats.NumBlockVisits);
2241
0
      }
2242
21.7k
    }
2243
21.7k
  }
2244
160k
2245
160k
  bool FallThroughDiagFull =
2246
160k
      !Diags.isIgnored(diag::warn_unannotated_fallthrough, D->getBeginLoc());
2247
160k
  bool FallThroughDiagPerFunction = !Diags.isIgnored(
2248
160k
      diag::warn_unannotated_fallthrough_per_function, D->getBeginLoc());
2249
160k
  if (FallThroughDiagFull || 
FallThroughDiagPerFunction160k
||
2250
160k
      
fscope->HasFallthroughStmt160k
) {
2251
119
    DiagnoseSwitchLabelsFallthrough(S, AC, !FallThroughDiagFull);
2252
119
  }
2253
160k
2254
160k
  if (S.getLangOpts().ObjCWeak &&
2255
160k
      
!Diags.isIgnored(diag::warn_arc_repeated_use_of_weak, D->getBeginLoc())1.32k
)
2256
111
    diagnoseRepeatedUseOfWeak(S, fscope, D, AC.getParentMap());
2257
160k
2258
160k
2259
160k
  // Check for infinite self-recursion in functions
2260
160k
  if (!Diags.isIgnored(diag::warn_infinite_recursive_function,
2261
160k
                       D->getBeginLoc())) {
2262
11.7k
    if (const FunctionDecl *FD = dyn_cast<FunctionDecl>(D)) {
2263
11.7k
      checkRecursiveFunction(S, FD, Body, AC);
2264
11.7k
    }
2265
11.7k
  }
2266
160k
2267
160k
  // Check for throw out of non-throwing function.
2268
160k
  if (!Diags.isIgnored(diag::warn_throw_in_noexcept_func, D->getBeginLoc()))
2269
160k
    if (const FunctionDecl *FD = dyn_cast<FunctionDecl>(D))
2270
153k
      if (S.getLangOpts().CPlusPlus && 
isNoexcept(FD)101k
)
2271
5.56k
        checkThrowInNonThrowingFunc(S, FD, AC);
2272
160k
2273
160k
  // If none of the previous checks caused a CFG build, trigger one here
2274
160k
  // for the logical error handler.
2275
160k
  if (LogicalErrorHandler::hasActiveDiagnostics(Diags, D->getBeginLoc())) {
2276
11.7k
    AC.getCFG();
2277
11.7k
  }
2278
160k
2279
160k
  // Collect statistics about the CFG if it was built.
2280
160k
  if (S.CollectStats && 
AC.isCFGBuilt()3
) {
2281
3
    ++NumFunctionsAnalyzed;
2282
3
    if (CFG *cfg = AC.getCFG()) {
2283
3
      // If we successfully built a CFG for this context, record some more
2284
3
      // detail information about it.
2285
3
      NumCFGBlocks += cfg->getNumBlockIDs();
2286
3
      MaxCFGBlocksPerFunction = std::max(MaxCFGBlocksPerFunction,
2287
3
                                         cfg->getNumBlockIDs());
2288
3
    } else {
2289
0
      ++NumFunctionsWithBadCFGs;
2290
0
    }
2291
3
  }
2292
160k
}
2293
2294
3
void clang::sema::AnalysisBasedWarnings::PrintStats() const {
2295
3
  llvm::errs() << "\n*** Analysis Based Warnings Stats:\n";
2296
3
2297
3
  unsigned NumCFGsBuilt = NumFunctionsAnalyzed - NumFunctionsWithBadCFGs;
2298
3
  unsigned AvgCFGBlocksPerFunction =
2299
3
      !NumCFGsBuilt ? 
00
: NumCFGBlocks/NumCFGsBuilt;
2300
3
  llvm::errs() << NumFunctionsAnalyzed << " functions analyzed ("
2301
3
               << NumFunctionsWithBadCFGs << " w/o CFGs).\n"
2302
3
               << "  " << NumCFGBlocks << " CFG blocks built.\n"
2303
3
               << "  " << AvgCFGBlocksPerFunction
2304
3
               << " average CFG blocks per function.\n"
2305
3
               << "  " << MaxCFGBlocksPerFunction
2306
3
               << " max CFG blocks per function.\n";
2307
3
2308
3
  unsigned AvgUninitVariablesPerFunction = !NumUninitAnalysisFunctions ? 0
2309
3
      : 
NumUninitAnalysisVariables/NumUninitAnalysisFunctions0
;
2310
3
  unsigned AvgUninitBlockVisitsPerFunction = !NumUninitAnalysisFunctions ? 0
2311
3
      : 
NumUninitAnalysisBlockVisits/NumUninitAnalysisFunctions0
;
2312
3
  llvm::errs() << NumUninitAnalysisFunctions
2313
3
               << " functions analyzed for uninitialiazed variables\n"
2314
3
               << "  " << NumUninitAnalysisVariables << " variables analyzed.\n"
2315
3
               << "  " << AvgUninitVariablesPerFunction
2316
3
               << " average variables per function.\n"
2317
3
               << "  " << MaxUninitAnalysisVariablesPerFunction
2318
3
               << " max variables per function.\n"
2319
3
               << "  " << NumUninitAnalysisBlockVisits << " block visits.\n"
2320
3
               << "  " << AvgUninitBlockVisitsPerFunction
2321
3
               << " average block visits per function.\n"
2322
3
               << "  " << MaxUninitAnalysisBlockVisitsPerFunction
2323
3
               << " max block visits per function.\n";
2324
3
}