Coverage Report

Created: 2024-01-17 10:31

/src/llvm-project/clang/lib/Sema/AnalysisBasedWarnings.cpp
Line
Count
Source
1
//=- AnalysisBasedWarnings.cpp - Sema warnings based on libAnalysis -*- C++ -*-=//
2
//
3
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
4
// See https://llvm.org/LICENSE.txt for license information.
5
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
6
//
7
//===----------------------------------------------------------------------===//
8
//
9
// This file defines analysis_warnings::[Policy,Executor].
10
// Together they are used by Sema to issue warnings based on inexpensive
11
// static analysis algorithms in libAnalysis.
12
//
13
//===----------------------------------------------------------------------===//
14
15
#include "clang/Sema/AnalysisBasedWarnings.h"
16
#include "clang/AST/Decl.h"
17
#include "clang/AST/DeclCXX.h"
18
#include "clang/AST/DeclObjC.h"
19
#include "clang/AST/EvaluatedExprVisitor.h"
20
#include "clang/AST/Expr.h"
21
#include "clang/AST/ExprCXX.h"
22
#include "clang/AST/ExprObjC.h"
23
#include "clang/AST/OperationKinds.h"
24
#include "clang/AST/ParentMap.h"
25
#include "clang/AST/RecursiveASTVisitor.h"
26
#include "clang/AST/StmtCXX.h"
27
#include "clang/AST/StmtObjC.h"
28
#include "clang/AST/StmtVisitor.h"
29
#include "clang/AST/RecursiveASTVisitor.h"
30
#include "clang/AST/Type.h"
31
#include "clang/Analysis/Analyses/CFGReachabilityAnalysis.h"
32
#include "clang/Analysis/Analyses/CalledOnceCheck.h"
33
#include "clang/Analysis/Analyses/Consumed.h"
34
#include "clang/Analysis/Analyses/ReachableCode.h"
35
#include "clang/Analysis/Analyses/ThreadSafety.h"
36
#include "clang/Analysis/Analyses/UninitializedValues.h"
37
#include "clang/Analysis/Analyses/UnsafeBufferUsage.h"
38
#include "clang/Analysis/AnalysisDeclContext.h"
39
#include "clang/Analysis/CFG.h"
40
#include "clang/Analysis/CFGStmtMap.h"
41
#include "clang/Basic/Diagnostic.h"
42
#include "clang/Basic/SourceLocation.h"
43
#include "clang/Basic/SourceManager.h"
44
#include "clang/Lex/Preprocessor.h"
45
#include "clang/Sema/ScopeInfo.h"
46
#include "clang/Sema/SemaInternal.h"
47
#include "llvm/ADT/ArrayRef.h"
48
#include "llvm/ADT/BitVector.h"
49
#include "llvm/ADT/MapVector.h"
50
#include "llvm/ADT/STLFunctionalExtras.h"
51
#include "llvm/ADT/SmallString.h"
52
#include "llvm/ADT/SmallVector.h"
53
#include "llvm/ADT/StringRef.h"
54
#include "llvm/Support/Casting.h"
55
#include <algorithm>
56
#include <deque>
57
#include <iterator>
58
#include <optional>
59
60
using namespace clang;
61
62
//===----------------------------------------------------------------------===//
63
// Unreachable code analysis.
64
//===----------------------------------------------------------------------===//
65
66
namespace {
67
  class UnreachableCodeHandler : public reachable_code::Callback {
68
    Sema &S;
69
    SourceRange PreviousSilenceableCondVal;
70
71
  public:
72
0
    UnreachableCodeHandler(Sema &s) : S(s) {}
73
74
    void HandleUnreachable(reachable_code::UnreachableKind UK, SourceLocation L,
75
                           SourceRange SilenceableCondVal, SourceRange R1,
76
0
                           SourceRange R2, bool HasFallThroughAttr) override {
77
      // If the diagnosed code is `[[fallthrough]];` and
78
      // `-Wunreachable-code-fallthrough` is enabled, suppress the `code will never
79
      // be executed` warning to avoid generating the diagnostic twice.
80
0
      if (HasFallThroughAttr &&
81
0
          !S.getDiagnostics().isIgnored(diag::warn_unreachable_fallthrough_attr,
82
0
                                        SourceLocation()))
83
0
        return;
84
85
      // Avoid reporting multiple unreachable code diagnostics that are
86
      // triggered by the same conditional value.
87
0
      if (PreviousSilenceableCondVal.isValid() &&
88
0
          SilenceableCondVal.isValid() &&
89
0
          PreviousSilenceableCondVal == SilenceableCondVal)
90
0
        return;
91
0
      PreviousSilenceableCondVal = SilenceableCondVal;
92
93
0
      unsigned diag = diag::warn_unreachable;
94
0
      switch (UK) {
95
0
        case reachable_code::UK_Break:
96
0
          diag = diag::warn_unreachable_break;
97
0
          break;
98
0
        case reachable_code::UK_Return:
99
0
          diag = diag::warn_unreachable_return;
100
0
          break;
101
0
        case reachable_code::UK_Loop_Increment:
102
0
          diag = diag::warn_unreachable_loop_increment;
103
0
          break;
104
0
        case reachable_code::UK_Other:
105
0
          break;
106
0
      }
107
108
0
      S.Diag(L, diag) << R1 << R2;
109
110
0
      SourceLocation Open = SilenceableCondVal.getBegin();
111
0
      if (Open.isValid()) {
112
0
        SourceLocation Close = SilenceableCondVal.getEnd();
113
0
        Close = S.getLocForEndOfToken(Close);
114
0
        if (Close.isValid()) {
115
0
          S.Diag(Open, diag::note_unreachable_silence)
116
0
            << FixItHint::CreateInsertion(Open, "/* DISABLES CODE */ (")
117
0
            << FixItHint::CreateInsertion(Close, ")");
118
0
        }
119
0
      }
120
0
    }
121
  };
122
} // anonymous namespace
123
124
/// CheckUnreachable - Check for unreachable code.
125
0
static void CheckUnreachable(Sema &S, AnalysisDeclContext &AC) {
126
  // As a heuristic, prune all diagnostics not in the main file.  Currently
127
  // the majority of warnings in headers are false positives.  These
128
  // are largely caused by configuration state, e.g. preprocessor
129
  // defined code, etc.
130
  //
131
  // Note that this is also a performance optimization.  Analyzing
132
  // headers many times can be expensive.
133
0
  if (!S.getSourceManager().isInMainFile(AC.getDecl()->getBeginLoc()))
134
0
    return;
135
136
0
  UnreachableCodeHandler UC(S);
137
0
  reachable_code::FindUnreachableCode(AC, S.getPreprocessor(), UC);
138
0
}
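To make the handler above concrete, here is a minimal, invented snippet that triggers it when compiled with -Wunreachable-code; the note's fix-it wraps the constant condition so the dead code can be kept deliberately:

    int demo(int x) {
      if (0)            // note suggests: if (/* DISABLES CODE */ (0))
        return x + 1;   // warning: code will never be executed
      return x;
    }

Because CheckUnreachable prunes declarations outside the main file, the same code in an included header is not diagnosed.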
139
140
namespace {
141
/// Warn on logical operator errors in CFGBuilder
142
class LogicalErrorHandler : public CFGCallback {
143
  Sema &S;
144
145
public:
146
0
  LogicalErrorHandler(Sema &S) : S(S) {}
147
148
0
  static bool HasMacroID(const Expr *E) {
149
0
    if (E->getExprLoc().isMacroID())
150
0
      return true;
151
152
    // Recurse to children.
153
0
    for (const Stmt *SubStmt : E->children())
154
0
      if (const Expr *SubExpr = dyn_cast_or_null<Expr>(SubStmt))
155
0
        if (HasMacroID(SubExpr))
156
0
          return true;
157
158
0
    return false;
159
0
  }
160
161
0
  void logicAlwaysTrue(const BinaryOperator *B, bool isAlwaysTrue) override {
162
0
    if (HasMacroID(B))
163
0
      return;
164
165
0
    unsigned DiagID = isAlwaysTrue
166
0
                          ? diag::warn_tautological_negation_or_compare
167
0
                          : diag::warn_tautological_negation_and_compare;
168
0
    SourceRange DiagRange = B->getSourceRange();
169
0
    S.Diag(B->getExprLoc(), DiagID) << DiagRange;
170
0
  }
171
172
0
  void compareAlwaysTrue(const BinaryOperator *B, bool isAlwaysTrue) override {
173
0
    if (HasMacroID(B))
174
0
      return;
175
176
0
    SourceRange DiagRange = B->getSourceRange();
177
0
    S.Diag(B->getExprLoc(), diag::warn_tautological_overlap_comparison)
178
0
        << DiagRange << isAlwaysTrue;
179
0
  }
180
181
  void compareBitwiseEquality(const BinaryOperator *B,
182
0
                              bool isAlwaysTrue) override {
183
0
    if (HasMacroID(B))
184
0
      return;
185
186
0
    SourceRange DiagRange = B->getSourceRange();
187
0
    S.Diag(B->getExprLoc(), diag::warn_comparison_bitwise_always)
188
0
        << DiagRange << isAlwaysTrue;
189
0
  }
190
191
0
  void compareBitwiseOr(const BinaryOperator *B) override {
192
0
    if (HasMacroID(B))
193
0
      return;
194
195
0
    SourceRange DiagRange = B->getSourceRange();
196
0
    S.Diag(B->getExprLoc(), diag::warn_comparison_bitwise_or) << DiagRange;
197
0
  }
198
199
  static bool hasActiveDiagnostics(DiagnosticsEngine &Diags,
200
0
                                   SourceLocation Loc) {
201
0
    return !Diags.isIgnored(diag::warn_tautological_overlap_comparison, Loc) ||
202
0
           !Diags.isIgnored(diag::warn_comparison_bitwise_or, Loc) ||
203
0
           !Diags.isIgnored(diag::warn_tautological_negation_and_compare, Loc);
204
0
  }
205
};
206
} // anonymous namespace
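A small invented example of the expression patterns these callbacks report when the corresponding tautological-comparison warnings are enabled; HasMacroID suppresses all of them when any part of the expression comes from a macro expansion:

    void sink(bool);
    void f(int x) {
      sink(x > 5 && x < 3);   // warning: overlapping comparisons always evaluate to false
      sink((x & 8) == 4);     // warning: bitwise comparison always evaluates to false
    }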
207
208
//===----------------------------------------------------------------------===//
209
// Check for infinite self-recursion in functions
210
//===----------------------------------------------------------------------===//
211
212
// Returns true if the function is called anywhere within the CFGBlock.
213
// For member functions, the additional condition of being called through the
214
// 'this' pointer is required.
215
0
static bool hasRecursiveCallInPath(const FunctionDecl *FD, CFGBlock &Block) {
216
  // Process all the Stmt's in this block to find any calls to FD.
217
0
  for (const auto &B : Block) {
218
0
    if (B.getKind() != CFGElement::Statement)
219
0
      continue;
220
221
0
    const CallExpr *CE = dyn_cast<CallExpr>(B.getAs<CFGStmt>()->getStmt());
222
0
    if (!CE || !CE->getCalleeDecl() ||
223
0
        CE->getCalleeDecl()->getCanonicalDecl() != FD)
224
0
      continue;
225
226
    // Skip function calls which are qualified with a templated class.
227
0
    if (const DeclRefExpr *DRE =
228
0
            dyn_cast<DeclRefExpr>(CE->getCallee()->IgnoreParenImpCasts())) {
229
0
      if (NestedNameSpecifier *NNS = DRE->getQualifier()) {
230
0
        if (NNS->getKind() == NestedNameSpecifier::TypeSpec &&
231
0
            isa<TemplateSpecializationType>(NNS->getAsType())) {
232
0
          continue;
233
0
        }
234
0
      }
235
0
    }
236
237
0
    const CXXMemberCallExpr *MCE = dyn_cast<CXXMemberCallExpr>(CE);
238
0
    if (!MCE || isa<CXXThisExpr>(MCE->getImplicitObjectArgument()) ||
239
0
        !MCE->getMethodDecl()->isVirtual())
240
0
      return true;
241
0
  }
242
0
  return false;
243
0
}
244
245
// Returns true if every path from the entry block passes through a call to FD.
246
0
static bool checkForRecursiveFunctionCall(const FunctionDecl *FD, CFG *cfg) {
247
0
  llvm::SmallPtrSet<CFGBlock *, 16> Visited;
248
0
  llvm::SmallVector<CFGBlock *, 16> WorkList;
249
  // Keep track of whether we found at least one recursive path.
250
0
  bool foundRecursion = false;
251
252
0
  const unsigned ExitID = cfg->getExit().getBlockID();
253
254
  // Seed the work list with the entry block.
255
0
  WorkList.push_back(&cfg->getEntry());
256
257
0
  while (!WorkList.empty()) {
258
0
    CFGBlock *Block = WorkList.pop_back_val();
259
260
0
    for (auto I = Block->succ_begin(), E = Block->succ_end(); I != E; ++I) {
261
0
      if (CFGBlock *SuccBlock = *I) {
262
0
        if (!Visited.insert(SuccBlock).second)
263
0
          continue;
264
265
        // Found a path to the exit node without a recursive call.
266
0
        if (ExitID == SuccBlock->getBlockID())
267
0
          return false;
268
269
        // If the successor block contains a recursive call, end analysis there.
270
0
        if (hasRecursiveCallInPath(FD, *SuccBlock)) {
271
0
          foundRecursion = true;
272
0
          continue;
273
0
        }
274
275
0
        WorkList.push_back(SuccBlock);
276
0
      }
277
0
    }
278
0
  }
279
0
  return foundRecursion;
280
0
}
281
282
static void checkRecursiveFunction(Sema &S, const FunctionDecl *FD,
283
0
                                   const Stmt *Body, AnalysisDeclContext &AC) {
284
0
  FD = FD->getCanonicalDecl();
285
286
  // Only run on non-templated functions and non-templated members of
287
  // templated classes.
288
0
  if (FD->getTemplatedKind() != FunctionDecl::TK_NonTemplate &&
289
0
      FD->getTemplatedKind() != FunctionDecl::TK_MemberSpecialization)
290
0
    return;
291
292
0
  CFG *cfg = AC.getCFG();
293
0
  if (!cfg) return;
294
295
  // If the exit block is unreachable, skip processing the function.
296
0
  if (cfg->getExit().pred_empty())
297
0
    return;
298
299
  // Emit a diagnostic if a recursive function call is detected on all paths.
300
0
  if (checkForRecursiveFunctionCall(FD, cfg))
301
0
    S.Diag(Body->getBeginLoc(), diag::warn_infinite_recursive_function);
302
0
}
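As a sketch of what the traversal above detects (function names invented, -Winfinite-recursion enabled): every path from the entry block must contain a self-call for the warning to fire, so a reachable non-recursive path suppresses it:

    int countdown(int n) {        // warning: all paths through this function
      return countdown(n - 1);    //          will call itself
    }

    int bounded(int n) {          // not flagged: the 'n <= 0' path reaches the
      if (n <= 0)                 //          exit block without a recursive call
        return 0;
      return bounded(n - 1);
    }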
303
304
//===----------------------------------------------------------------------===//
305
// Check for throw in a non-throwing function.
306
//===----------------------------------------------------------------------===//
307
308
/// Determine whether an exception thrown by E, unwinding from ThrowBlock,
309
/// can reach ExitBlock.
310
static bool throwEscapes(Sema &S, const CXXThrowExpr *E, CFGBlock &ThrowBlock,
311
0
                         CFG *Body) {
312
0
  SmallVector<CFGBlock *, 16> Stack;
313
0
  llvm::BitVector Queued(Body->getNumBlockIDs());
314
315
0
  Stack.push_back(&ThrowBlock);
316
0
  Queued[ThrowBlock.getBlockID()] = true;
317
318
0
  while (!Stack.empty()) {
319
0
    CFGBlock &UnwindBlock = *Stack.back();
320
0
    Stack.pop_back();
321
322
0
    for (auto &Succ : UnwindBlock.succs()) {
323
0
      if (!Succ.isReachable() || Queued[Succ->getBlockID()])
324
0
        continue;
325
326
0
      if (Succ->getBlockID() == Body->getExit().getBlockID())
327
0
        return true;
328
329
0
      if (auto *Catch =
330
0
              dyn_cast_or_null<CXXCatchStmt>(Succ->getLabel())) {
331
0
        QualType Caught = Catch->getCaughtType();
332
0
        if (Caught.isNull() || // catch (...) catches everything
333
0
            !E->getSubExpr() || // throw; is considered caught by any handler
334
0
            S.handlerCanCatch(Caught, E->getSubExpr()->getType()))
335
          // Exception doesn't escape via this path.
336
0
          break;
337
0
      } else {
338
0
        Stack.push_back(Succ);
339
0
        Queued[Succ->getBlockID()] = true;
340
0
      }
341
0
    }
342
0
  }
343
344
0
  return false;
345
0
}
346
347
static void visitReachableThrows(
348
    CFG *BodyCFG,
349
0
    llvm::function_ref<void(const CXXThrowExpr *, CFGBlock &)> Visit) {
350
0
  llvm::BitVector Reachable(BodyCFG->getNumBlockIDs());
351
0
  clang::reachable_code::ScanReachableFromBlock(&BodyCFG->getEntry(), Reachable);
352
0
  for (CFGBlock *B : *BodyCFG) {
353
0
    if (!Reachable[B->getBlockID()])
354
0
      continue;
355
0
    for (CFGElement &E : *B) {
356
0
      std::optional<CFGStmt> S = E.getAs<CFGStmt>();
357
0
      if (!S)
358
0
        continue;
359
0
      if (auto *Throw = dyn_cast<CXXThrowExpr>(S->getStmt()))
360
0
        Visit(Throw, *B);
361
0
    }
362
0
  }
363
0
}
364
365
static void EmitDiagForCXXThrowInNonThrowingFunc(Sema &S, SourceLocation OpLoc,
366
0
                                                 const FunctionDecl *FD) {
367
0
  if (!S.getSourceManager().isInSystemHeader(OpLoc) &&
368
0
      FD->getTypeSourceInfo()) {
369
0
    S.Diag(OpLoc, diag::warn_throw_in_noexcept_func) << FD;
370
0
    if (S.getLangOpts().CPlusPlus11 &&
371
0
        (isa<CXXDestructorDecl>(FD) ||
372
0
         FD->getDeclName().getCXXOverloadedOperator() == OO_Delete ||
373
0
         FD->getDeclName().getCXXOverloadedOperator() == OO_Array_Delete)) {
374
0
      if (const auto *Ty = FD->getTypeSourceInfo()->getType()->
375
0
                                         getAs<FunctionProtoType>())
376
0
        S.Diag(FD->getLocation(), diag::note_throw_in_dtor)
377
0
            << !isa<CXXDestructorDecl>(FD) << !Ty->hasExceptionSpec()
378
0
            << FD->getExceptionSpecSourceRange();
379
0
    } else
380
0
      S.Diag(FD->getLocation(), diag::note_throw_in_function)
381
0
          << FD->getExceptionSpecSourceRange();
382
0
  }
383
0
}
384
385
static void checkThrowInNonThrowingFunc(Sema &S, const FunctionDecl *FD,
386
0
                                        AnalysisDeclContext &AC) {
387
0
  CFG *BodyCFG = AC.getCFG();
388
0
  if (!BodyCFG)
389
0
    return;
390
0
  if (BodyCFG->getExit().pred_empty())
391
0
    return;
392
0
  visitReachableThrows(BodyCFG, [&](const CXXThrowExpr *Throw, CFGBlock &Block) {
393
0
    if (throwEscapes(S, Throw, Block, BodyCFG))
394
0
      EmitDiagForCXXThrowInNonThrowingFunc(S, Throw->getThrowLoc(), FD);
395
0
  });
396
0
}
397
398
0
static bool isNoexcept(const FunctionDecl *FD) {
399
0
  const auto *FPT = FD->getType()->castAs<FunctionProtoType>();
400
0
  if (FPT->isNothrow() || FD->hasAttr<NoThrowAttr>())
401
0
    return true;
402
0
  return false;
403
0
}
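An invented pair of functions showing the distinction throwEscapes draws, assuming the warning group containing warn_throw_in_noexcept_func is enabled:

    void leaks() noexcept {
      throw 42;                          // warning: has a non-throwing exception
    }                                    //          specification but can still throw

    void contained() noexcept {
      try { throw 42; } catch (...) {}   // not flagged: the handler catches everything,
    }                                    // so the throw never reaches the exit block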
404
405
//===----------------------------------------------------------------------===//
406
// Check for missing return value.
407
//===----------------------------------------------------------------------===//
408
409
enum ControlFlowKind {
410
  UnknownFallThrough,
411
  NeverFallThrough,
412
  MaybeFallThrough,
413
  AlwaysFallThrough,
414
  NeverFallThroughOrReturn
415
};
416
417
/// CheckFallThrough - Check that we don't fall off the end of a
418
/// Statement that should return a value.
419
///
420
/// \returns AlwaysFallThrough iff we always fall off the end of the statement,
421
/// MaybeFallThrough iff we might or might not fall off the end,
422
/// NeverFallThroughOrReturn iff we never fall off the end of the statement or
423
/// return, and NeverFallThrough iff we never fall off the end of the
424
/// statement but may return.  We assume that functions not marked noreturn
425
/// will return.
426
0
static ControlFlowKind CheckFallThrough(AnalysisDeclContext &AC) {
427
0
  CFG *cfg = AC.getCFG();
428
0
  if (!cfg) return UnknownFallThrough;
429
430
  // The CFG leaves in dead things, and we don't want the dead code paths to
431
  // confuse us, so we mark all live things first.
432
0
  llvm::BitVector live(cfg->getNumBlockIDs());
433
0
  unsigned count = reachable_code::ScanReachableFromBlock(&cfg->getEntry(),
434
0
                                                          live);
435
436
0
  bool AddEHEdges = AC.getAddEHEdges();
437
0
  if (!AddEHEdges && count != cfg->getNumBlockIDs())
438
    // When there are things remaining dead, and we didn't add EH edges
439
    // from CallExprs to the catch clauses, we have to go back and
440
    // mark them as live.
441
0
    for (const auto *B : *cfg) {
442
0
      if (!live[B->getBlockID()]) {
443
0
        if (B->pred_begin() == B->pred_end()) {
444
0
          const Stmt *Term = B->getTerminatorStmt();
445
0
          if (Term && isa<CXXTryStmt>(Term))
446
            // When not adding EH edges from calls, catch clauses
447
            // can otherwise seem dead.  Avoid noting them as dead.
448
0
            count += reachable_code::ScanReachableFromBlock(B, live);
449
0
          continue;
450
0
        }
451
0
      }
452
0
    }
453
454
  // Now that we know what is live, we check the live predecessors of the exit block
455
  // and look for fall through paths, being careful to ignore normal returns,
456
  // and exceptional paths.
457
0
  bool HasLiveReturn = false;
458
0
  bool HasFakeEdge = false;
459
0
  bool HasPlainEdge = false;
460
0
  bool HasAbnormalEdge = false;
461
462
  // Ignore default cases that aren't likely to be reachable because all
463
  // enums in a switch(X) have explicit case statements.
464
0
  CFGBlock::FilterOptions FO;
465
0
  FO.IgnoreDefaultsWithCoveredEnums = 1;
466
467
0
  for (CFGBlock::filtered_pred_iterator I =
468
0
           cfg->getExit().filtered_pred_start_end(FO);
469
0
       I.hasMore(); ++I) {
470
0
    const CFGBlock &B = **I;
471
0
    if (!live[B.getBlockID()])
472
0
      continue;
473
474
    // Skip blocks which contain an element marked as no-return. They don't
475
    // represent actually viable edges into the exit block, so mark them as
476
    // abnormal.
477
0
    if (B.hasNoReturnElement()) {
478
0
      HasAbnormalEdge = true;
479
0
      continue;
480
0
    }
481
482
    // Destructors can appear after the 'return' in the CFG.  This is
483
    // normal.  We need to look past the destructors for the return
484
    // statement (if it exists).
485
0
    CFGBlock::const_reverse_iterator ri = B.rbegin(), re = B.rend();
486
487
0
    for ( ; ri != re ; ++ri)
488
0
      if (ri->getAs<CFGStmt>())
489
0
        break;
490
491
    // No more CFGElements in the block?
492
0
    if (ri == re) {
493
0
      const Stmt *Term = B.getTerminatorStmt();
494
0
      if (Term && (isa<CXXTryStmt>(Term) || isa<ObjCAtTryStmt>(Term))) {
495
0
        HasAbnormalEdge = true;
496
0
        continue;
497
0
      }
498
      // A labeled empty statement, or the entry block...
499
0
      HasPlainEdge = true;
500
0
      continue;
501
0
    }
502
503
0
    CFGStmt CS = ri->castAs<CFGStmt>();
504
0
    const Stmt *S = CS.getStmt();
505
0
    if (isa<ReturnStmt>(S) || isa<CoreturnStmt>(S)) {
506
0
      HasLiveReturn = true;
507
0
      continue;
508
0
    }
509
0
    if (isa<ObjCAtThrowStmt>(S)) {
510
0
      HasFakeEdge = true;
511
0
      continue;
512
0
    }
513
0
    if (isa<CXXThrowExpr>(S)) {
514
0
      HasFakeEdge = true;
515
0
      continue;
516
0
    }
517
0
    if (isa<MSAsmStmt>(S)) {
518
      // TODO: Verify this is correct.
519
0
      HasFakeEdge = true;
520
0
      HasLiveReturn = true;
521
0
      continue;
522
0
    }
523
0
    if (isa<CXXTryStmt>(S)) {
524
0
      HasAbnormalEdge = true;
525
0
      continue;
526
0
    }
527
0
    if (!llvm::is_contained(B.succs(), &cfg->getExit())) {
528
0
      HasAbnormalEdge = true;
529
0
      continue;
530
0
    }
531
532
0
    HasPlainEdge = true;
533
0
  }
534
0
  if (!HasPlainEdge) {
535
0
    if (HasLiveReturn)
536
0
      return NeverFallThrough;
537
0
    return NeverFallThroughOrReturn;
538
0
  }
539
0
  if (HasAbnormalEdge || HasFakeEdge || HasLiveReturn)
540
0
    return MaybeFallThrough;
541
  // This says AlwaysFallThrough for calls to functions that are not marked
542
  // noreturn but don't return.  If people would like this warning to be more
543
  // accurate, such functions should be marked as noreturn.
544
0
  return AlwaysFallThrough;
545
0
}
546
547
namespace {
548
549
struct CheckFallThroughDiagnostics {
550
  unsigned diag_MaybeFallThrough_HasNoReturn;
551
  unsigned diag_MaybeFallThrough_ReturnsNonVoid;
552
  unsigned diag_AlwaysFallThrough_HasNoReturn;
553
  unsigned diag_AlwaysFallThrough_ReturnsNonVoid;
554
  unsigned diag_NeverFallThroughOrReturn;
555
  enum { Function, Block, Lambda, Coroutine } funMode;
556
  SourceLocation FuncLoc;
557
558
0
  static CheckFallThroughDiagnostics MakeForFunction(const Decl *Func) {
559
0
    CheckFallThroughDiagnostics D;
560
0
    D.FuncLoc = Func->getLocation();
561
0
    D.diag_MaybeFallThrough_HasNoReturn =
562
0
      diag::warn_falloff_noreturn_function;
563
0
    D.diag_MaybeFallThrough_ReturnsNonVoid =
564
0
      diag::warn_maybe_falloff_nonvoid_function;
565
0
    D.diag_AlwaysFallThrough_HasNoReturn =
566
0
      diag::warn_falloff_noreturn_function;
567
0
    D.diag_AlwaysFallThrough_ReturnsNonVoid =
568
0
      diag::warn_falloff_nonvoid_function;
569
570
    // Don't suggest that virtual functions be marked "noreturn", since they
571
    // might be overridden by non-noreturn functions.
572
0
    bool isVirtualMethod = false;
573
0
    if (const CXXMethodDecl *Method = dyn_cast<CXXMethodDecl>(Func))
574
0
      isVirtualMethod = Method->isVirtual();
575
576
    // Don't suggest that template instantiations be marked "noreturn"
577
0
    bool isTemplateInstantiation = false;
578
0
    if (const FunctionDecl *Function = dyn_cast<FunctionDecl>(Func))
579
0
      isTemplateInstantiation = Function->isTemplateInstantiation();
580
581
0
    if (!isVirtualMethod && !isTemplateInstantiation)
582
0
      D.diag_NeverFallThroughOrReturn =
583
0
        diag::warn_suggest_noreturn_function;
584
0
    else
585
0
      D.diag_NeverFallThroughOrReturn = 0;
586
587
0
    D.funMode = Function;
588
0
    return D;
589
0
  }
590
591
0
  static CheckFallThroughDiagnostics MakeForCoroutine(const Decl *Func) {
592
0
    CheckFallThroughDiagnostics D;
593
0
    D.FuncLoc = Func->getLocation();
594
0
    D.diag_MaybeFallThrough_HasNoReturn = 0;
595
0
    D.diag_MaybeFallThrough_ReturnsNonVoid =
596
0
        diag::warn_maybe_falloff_nonvoid_coroutine;
597
0
    D.diag_AlwaysFallThrough_HasNoReturn = 0;
598
0
    D.diag_AlwaysFallThrough_ReturnsNonVoid =
599
0
        diag::warn_falloff_nonvoid_coroutine;
600
0
    D.diag_NeverFallThroughOrReturn = 0;
601
0
    D.funMode = Coroutine;
602
0
    return D;
603
0
  }
604
605
0
  static CheckFallThroughDiagnostics MakeForBlock() {
606
0
    CheckFallThroughDiagnostics D;
607
0
    D.diag_MaybeFallThrough_HasNoReturn =
608
0
      diag::err_noreturn_block_has_return_expr;
609
0
    D.diag_MaybeFallThrough_ReturnsNonVoid =
610
0
      diag::err_maybe_falloff_nonvoid_block;
611
0
    D.diag_AlwaysFallThrough_HasNoReturn =
612
0
      diag::err_noreturn_block_has_return_expr;
613
0
    D.diag_AlwaysFallThrough_ReturnsNonVoid =
614
0
      diag::err_falloff_nonvoid_block;
615
0
    D.diag_NeverFallThroughOrReturn = 0;
616
0
    D.funMode = Block;
617
0
    return D;
618
0
  }
619
620
0
  static CheckFallThroughDiagnostics MakeForLambda() {
621
0
    CheckFallThroughDiagnostics D;
622
0
    D.diag_MaybeFallThrough_HasNoReturn =
623
0
      diag::err_noreturn_lambda_has_return_expr;
624
0
    D.diag_MaybeFallThrough_ReturnsNonVoid =
625
0
      diag::warn_maybe_falloff_nonvoid_lambda;
626
0
    D.diag_AlwaysFallThrough_HasNoReturn =
627
0
      diag::err_noreturn_lambda_has_return_expr;
628
0
    D.diag_AlwaysFallThrough_ReturnsNonVoid =
629
0
      diag::warn_falloff_nonvoid_lambda;
630
0
    D.diag_NeverFallThroughOrReturn = 0;
631
0
    D.funMode = Lambda;
632
0
    return D;
633
0
  }
634
635
  bool checkDiagnostics(DiagnosticsEngine &D, bool ReturnsVoid,
636
0
                        bool HasNoReturn) const {
637
0
    if (funMode == Function) {
638
0
      return (ReturnsVoid ||
639
0
              D.isIgnored(diag::warn_maybe_falloff_nonvoid_function,
640
0
                          FuncLoc)) &&
641
0
             (!HasNoReturn ||
642
0
              D.isIgnored(diag::warn_noreturn_function_has_return_expr,
643
0
                          FuncLoc)) &&
644
0
             (!ReturnsVoid ||
645
0
              D.isIgnored(diag::warn_suggest_noreturn_block, FuncLoc));
646
0
    }
647
0
    if (funMode == Coroutine) {
648
0
      return (ReturnsVoid ||
649
0
              D.isIgnored(diag::warn_maybe_falloff_nonvoid_function, FuncLoc) ||
650
0
              D.isIgnored(diag::warn_maybe_falloff_nonvoid_coroutine,
651
0
                          FuncLoc)) &&
652
0
             (!HasNoReturn);
653
0
    }
654
    // For blocks / lambdas.
655
0
    return ReturnsVoid && !HasNoReturn;
656
0
  }
657
};
658
659
} // anonymous namespace
660
661
/// CheckFallThroughForBody - Check that we don't fall off the end of a
662
/// function that should return a value.  Check that we don't fall off the end
663
/// of a noreturn function.  We assume that functions and blocks not marked
664
/// noreturn will return.
665
static void CheckFallThroughForBody(Sema &S, const Decl *D, const Stmt *Body,
666
                                    QualType BlockType,
667
                                    const CheckFallThroughDiagnostics &CD,
668
                                    AnalysisDeclContext &AC,
669
0
                                    sema::FunctionScopeInfo *FSI) {
670
671
0
  bool ReturnsVoid = false;
672
0
  bool HasNoReturn = false;
673
0
  bool IsCoroutine = FSI->isCoroutine();
674
675
0
  if (const auto *FD = dyn_cast<FunctionDecl>(D)) {
676
0
    if (const auto *CBody = dyn_cast<CoroutineBodyStmt>(Body))
677
0
      ReturnsVoid = CBody->getFallthroughHandler() != nullptr;
678
0
    else
679
0
      ReturnsVoid = FD->getReturnType()->isVoidType();
680
0
    HasNoReturn = FD->isNoReturn();
681
0
  }
682
0
  else if (const auto *MD = dyn_cast<ObjCMethodDecl>(D)) {
683
0
    ReturnsVoid = MD->getReturnType()->isVoidType();
684
0
    HasNoReturn = MD->hasAttr<NoReturnAttr>();
685
0
  }
686
0
  else if (isa<BlockDecl>(D)) {
687
0
    if (const FunctionType *FT =
688
0
          BlockType->getPointeeType()->getAs<FunctionType>()) {
689
0
      if (FT->getReturnType()->isVoidType())
690
0
        ReturnsVoid = true;
691
0
      if (FT->getNoReturnAttr())
692
0
        HasNoReturn = true;
693
0
    }
694
0
  }
695
696
0
  DiagnosticsEngine &Diags = S.getDiagnostics();
697
698
  // Short circuit for compilation speed.
699
0
  if (CD.checkDiagnostics(Diags, ReturnsVoid, HasNoReturn))
700
0
      return;
701
0
  SourceLocation LBrace = Body->getBeginLoc(), RBrace = Body->getEndLoc();
702
0
  auto EmitDiag = [&](SourceLocation Loc, unsigned DiagID) {
703
0
    if (IsCoroutine)
704
0
      S.Diag(Loc, DiagID) << FSI->CoroutinePromise->getType();
705
0
    else
706
0
      S.Diag(Loc, DiagID);
707
0
  };
708
709
  // cpu_dispatch functions permit empty function bodies for ICC compatibility.
710
0
  if (D->getAsFunction() && D->getAsFunction()->isCPUDispatchMultiVersion())
711
0
    return;
712
713
  // Either in a function body compound statement, or a function-try-block.
714
0
  switch (CheckFallThrough(AC)) {
715
0
    case UnknownFallThrough:
716
0
      break;
717
718
0
    case MaybeFallThrough:
719
0
      if (HasNoReturn)
720
0
        EmitDiag(RBrace, CD.diag_MaybeFallThrough_HasNoReturn);
721
0
      else if (!ReturnsVoid)
722
0
        EmitDiag(RBrace, CD.diag_MaybeFallThrough_ReturnsNonVoid);
723
0
      break;
724
0
    case AlwaysFallThrough:
725
0
      if (HasNoReturn)
726
0
        EmitDiag(RBrace, CD.diag_AlwaysFallThrough_HasNoReturn);
727
0
      else if (!ReturnsVoid)
728
0
        EmitDiag(RBrace, CD.diag_AlwaysFallThrough_ReturnsNonVoid);
729
0
      break;
730
0
    case NeverFallThroughOrReturn:
731
0
      if (ReturnsVoid && !HasNoReturn && CD.diag_NeverFallThroughOrReturn) {
732
0
        if (const FunctionDecl *FD = dyn_cast<FunctionDecl>(D)) {
733
0
          S.Diag(LBrace, CD.diag_NeverFallThroughOrReturn) << 0 << FD;
734
0
        } else if (const ObjCMethodDecl *MD = dyn_cast<ObjCMethodDecl>(D)) {
735
0
          S.Diag(LBrace, CD.diag_NeverFallThroughOrReturn) << 1 << MD;
736
0
        } else {
737
0
          S.Diag(LBrace, CD.diag_NeverFallThroughOrReturn);
738
0
        }
739
0
      }
740
0
      break;
741
0
    case NeverFallThrough:
742
0
      break;
743
0
  }
744
0
}
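Minimal, invented functions for the two diagnosed outcomes of the switch above, assuming the usual -Wreturn-type diagnostics:

    int always(bool) {          // warning: non-void function does not return a value
    }

    int sometimes(bool b) {     // warning: non-void function does not return a value
      if (b)                    //          in all control paths
        return 1;
    }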
745
746
//===----------------------------------------------------------------------===//
747
// -Wuninitialized
748
//===----------------------------------------------------------------------===//
749
750
namespace {
751
/// ContainsReference - A visitor class to search for references to
752
/// a particular declaration (the needle) within any evaluated component of an
753
/// expression (recursively).
754
class ContainsReference : public ConstEvaluatedExprVisitor<ContainsReference> {
755
  bool FoundReference;
756
  const DeclRefExpr *Needle;
757
758
public:
759
  typedef ConstEvaluatedExprVisitor<ContainsReference> Inherited;
760
761
  ContainsReference(ASTContext &Context, const DeclRefExpr *Needle)
762
0
    : Inherited(Context), FoundReference(false), Needle(Needle) {}
763
764
0
  void VisitExpr(const Expr *E) {
765
    // Stop evaluating if we already have a reference.
766
0
    if (FoundReference)
767
0
      return;
768
769
0
    Inherited::VisitExpr(E);
770
0
  }
771
772
0
  void VisitDeclRefExpr(const DeclRefExpr *E) {
773
0
    if (E == Needle)
774
0
      FoundReference = true;
775
0
    else
776
0
      Inherited::VisitDeclRefExpr(E);
777
0
  }
778
779
0
  bool doesContainReference() const { return FoundReference; }
780
};
781
} // anonymous namespace
782
783
0
static bool SuggestInitializationFixit(Sema &S, const VarDecl *VD) {
784
0
  QualType VariableTy = VD->getType().getCanonicalType();
785
0
  if (VariableTy->isBlockPointerType() &&
786
0
      !VD->hasAttr<BlocksAttr>()) {
787
0
    S.Diag(VD->getLocation(), diag::note_block_var_fixit_add_initialization)
788
0
        << VD->getDeclName()
789
0
        << FixItHint::CreateInsertion(VD->getLocation(), "__block ");
790
0
    return true;
791
0
  }
792
793
  // Don't issue a fixit if there is already an initializer.
794
0
  if (VD->getInit())
795
0
    return false;
796
797
  // Don't suggest a fixit inside macros.
798
0
  if (VD->getEndLoc().isMacroID())
799
0
    return false;
800
801
0
  SourceLocation Loc = S.getLocForEndOfToken(VD->getEndLoc());
802
803
  // Suggest possible initialization (if any).
804
0
  std::string Init = S.getFixItZeroInitializerForType(VariableTy, Loc);
805
0
  if (Init.empty())
806
0
    return false;
807
808
0
  S.Diag(Loc, diag::note_var_fixit_add_initialization) << VD->getDeclName()
809
0
    << FixItHint::CreateInsertion(Loc, Init);
810
0
  return true;
811
0
}
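A sketch (identifiers invented) of the fix-it this helper attaches when an ordinary uninitialized local is diagnosed:

    int consume(int);
    int g() {
      int n;               // note: initialize the variable 'n' to silence this warning
      return consume(n);   // warning: variable 'n' is uninitialized when used here
    }                      // suggested fix-it: int n = 0;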
812
813
/// Create a fixit to remove an if-like statement, on the assumption that its
814
/// condition is CondVal.
815
static void CreateIfFixit(Sema &S, const Stmt *If, const Stmt *Then,
816
                          const Stmt *Else, bool CondVal,
817
0
                          FixItHint &Fixit1, FixItHint &Fixit2) {
818
0
  if (CondVal) {
819
    // If condition is always true, remove all but the 'then'.
820
0
    Fixit1 = FixItHint::CreateRemoval(
821
0
        CharSourceRange::getCharRange(If->getBeginLoc(), Then->getBeginLoc()));
822
0
    if (Else) {
823
0
      SourceLocation ElseKwLoc = S.getLocForEndOfToken(Then->getEndLoc());
824
0
      Fixit2 =
825
0
          FixItHint::CreateRemoval(SourceRange(ElseKwLoc, Else->getEndLoc()));
826
0
    }
827
0
  } else {
828
    // If condition is always false, remove all but the 'else'.
829
0
    if (Else)
830
0
      Fixit1 = FixItHint::CreateRemoval(CharSourceRange::getCharRange(
831
0
          If->getBeginLoc(), Else->getBeginLoc()));
832
0
    else
833
0
      Fixit1 = FixItHint::CreateRemoval(If->getSourceRange());
834
0
  }
835
0
}
836
837
/// DiagUninitUse -- Helper function to produce a diagnostic for an
838
/// uninitialized use of a variable.
839
static void DiagUninitUse(Sema &S, const VarDecl *VD, const UninitUse &Use,
840
0
                          bool IsCapturedByBlock) {
841
0
  bool Diagnosed = false;
842
843
0
  switch (Use.getKind()) {
844
0
  case UninitUse::Always:
845
0
    S.Diag(Use.getUser()->getBeginLoc(), diag::warn_uninit_var)
846
0
        << VD->getDeclName() << IsCapturedByBlock
847
0
        << Use.getUser()->getSourceRange();
848
0
    return;
849
850
0
  case UninitUse::AfterDecl:
851
0
  case UninitUse::AfterCall:
852
0
    S.Diag(VD->getLocation(), diag::warn_sometimes_uninit_var)
853
0
      << VD->getDeclName() << IsCapturedByBlock
854
0
      << (Use.getKind() == UninitUse::AfterDecl ? 4 : 5)
855
0
      << const_cast<DeclContext*>(VD->getLexicalDeclContext())
856
0
      << VD->getSourceRange();
857
0
    S.Diag(Use.getUser()->getBeginLoc(), diag::note_uninit_var_use)
858
0
        << IsCapturedByBlock << Use.getUser()->getSourceRange();
859
0
    return;
860
861
0
  case UninitUse::Maybe:
862
0
  case UninitUse::Sometimes:
863
    // Carry on to report sometimes-uninitialized branches, if possible,
864
    // or a 'may be used uninitialized' diagnostic otherwise.
865
0
    break;
866
0
  }
867
868
  // Diagnose each branch which leads to a sometimes-uninitialized use.
869
0
  for (UninitUse::branch_iterator I = Use.branch_begin(), E = Use.branch_end();
870
0
       I != E; ++I) {
871
0
    assert(Use.getKind() == UninitUse::Sometimes);
872
873
0
    const Expr *User = Use.getUser();
874
0
    const Stmt *Term = I->Terminator;
875
876
    // Information used when building the diagnostic.
877
0
    unsigned DiagKind;
878
0
    StringRef Str;
879
0
    SourceRange Range;
880
881
    // FixIts to suppress the diagnostic by removing the dead condition.
882
    // For all binary terminators, branch 0 is taken if the condition is true,
883
    // and branch 1 is taken if the condition is false.
884
0
    int RemoveDiagKind = -1;
885
0
    const char *FixitStr =
886
0
        S.getLangOpts().CPlusPlus ? (I->Output ? "true" : "false")
887
0
                                  : (I->Output ? "1" : "0");
888
0
    FixItHint Fixit1, Fixit2;
889
890
0
    switch (Term ? Term->getStmtClass() : Stmt::DeclStmtClass) {
891
0
    default:
892
      // Don't know how to report this. Just fall back to 'may be used
893
      // uninitialized'. FIXME: Can this happen?
894
0
      continue;
895
896
    // "condition is true / condition is false".
897
0
    case Stmt::IfStmtClass: {
898
0
      const IfStmt *IS = cast<IfStmt>(Term);
899
0
      DiagKind = 0;
900
0
      Str = "if";
901
0
      Range = IS->getCond()->getSourceRange();
902
0
      RemoveDiagKind = 0;
903
0
      CreateIfFixit(S, IS, IS->getThen(), IS->getElse(),
904
0
                    I->Output, Fixit1, Fixit2);
905
0
      break;
906
0
    }
907
0
    case Stmt::ConditionalOperatorClass: {
908
0
      const ConditionalOperator *CO = cast<ConditionalOperator>(Term);
909
0
      DiagKind = 0;
910
0
      Str = "?:";
911
0
      Range = CO->getCond()->getSourceRange();
912
0
      RemoveDiagKind = 0;
913
0
      CreateIfFixit(S, CO, CO->getTrueExpr(), CO->getFalseExpr(),
914
0
                    I->Output, Fixit1, Fixit2);
915
0
      break;
916
0
    }
917
0
    case Stmt::BinaryOperatorClass: {
918
0
      const BinaryOperator *BO = cast<BinaryOperator>(Term);
919
0
      if (!BO->isLogicalOp())
920
0
        continue;
921
0
      DiagKind = 0;
922
0
      Str = BO->getOpcodeStr();
923
0
      Range = BO->getLHS()->getSourceRange();
924
0
      RemoveDiagKind = 0;
925
0
      if ((BO->getOpcode() == BO_LAnd && I->Output) ||
926
0
          (BO->getOpcode() == BO_LOr && !I->Output))
927
        // true && y -> y, false || y -> y.
928
0
        Fixit1 = FixItHint::CreateRemoval(
929
0
            SourceRange(BO->getBeginLoc(), BO->getOperatorLoc()));
930
0
      else
931
        // false && y -> false, true || y -> true.
932
0
        Fixit1 = FixItHint::CreateReplacement(BO->getSourceRange(), FixitStr);
933
0
      break;
934
0
    }
935
936
    // "loop is entered / loop is exited".
937
0
    case Stmt::WhileStmtClass:
938
0
      DiagKind = 1;
939
0
      Str = "while";
940
0
      Range = cast<WhileStmt>(Term)->getCond()->getSourceRange();
941
0
      RemoveDiagKind = 1;
942
0
      Fixit1 = FixItHint::CreateReplacement(Range, FixitStr);
943
0
      break;
944
0
    case Stmt::ForStmtClass:
945
0
      DiagKind = 1;
946
0
      Str = "for";
947
0
      Range = cast<ForStmt>(Term)->getCond()->getSourceRange();
948
0
      RemoveDiagKind = 1;
949
0
      if (I->Output)
950
0
        Fixit1 = FixItHint::CreateRemoval(Range);
951
0
      else
952
0
        Fixit1 = FixItHint::CreateReplacement(Range, FixitStr);
953
0
      break;
954
0
    case Stmt::CXXForRangeStmtClass:
955
0
      if (I->Output == 1) {
956
        // The use occurs if a range-based for loop's body never executes.
957
        // That may be impossible, and there's no syntactic fix for this,
958
        // so treat it as a 'may be uninitialized' case.
959
0
        continue;
960
0
      }
961
0
      DiagKind = 1;
962
0
      Str = "for";
963
0
      Range = cast<CXXForRangeStmt>(Term)->getRangeInit()->getSourceRange();
964
0
      break;
965
966
    // "condition is true / loop is exited".
967
0
    case Stmt::DoStmtClass:
968
0
      DiagKind = 2;
969
0
      Str = "do";
970
0
      Range = cast<DoStmt>(Term)->getCond()->getSourceRange();
971
0
      RemoveDiagKind = 1;
972
0
      Fixit1 = FixItHint::CreateReplacement(Range, FixitStr);
973
0
      break;
974
975
    // "switch case is taken".
976
0
    case Stmt::CaseStmtClass:
977
0
      DiagKind = 3;
978
0
      Str = "case";
979
0
      Range = cast<CaseStmt>(Term)->getLHS()->getSourceRange();
980
0
      break;
981
0
    case Stmt::DefaultStmtClass:
982
0
      DiagKind = 3;
983
0
      Str = "default";
984
0
      Range = cast<DefaultStmt>(Term)->getDefaultLoc();
985
0
      break;
986
0
    }
987
988
0
    S.Diag(Range.getBegin(), diag::warn_sometimes_uninit_var)
989
0
      << VD->getDeclName() << IsCapturedByBlock << DiagKind
990
0
      << Str << I->Output << Range;
991
0
    S.Diag(User->getBeginLoc(), diag::note_uninit_var_use)
992
0
        << IsCapturedByBlock << User->getSourceRange();
993
0
    if (RemoveDiagKind != -1)
994
0
      S.Diag(Fixit1.RemoveRange.getBegin(), diag::note_uninit_fixit_remove_cond)
995
0
        << RemoveDiagKind << Str << I->Output << Fixit1 << Fixit2;
996
997
0
    Diagnosed = true;
998
0
  }
999
1000
0
  if (!Diagnosed)
1001
0
    S.Diag(Use.getUser()->getBeginLoc(), diag::warn_maybe_uninit_var)
1002
0
        << VD->getDeclName() << IsCapturedByBlock
1003
0
        << Use.getUser()->getSourceRange();
1004
0
}
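To illustrate the branch-oriented wording above, an invented example that produces the "sometimes uninitialized" form together with the condition-removal note (with -Wsometimes-uninitialized, part of the -Wuninitialized group):

    int f(bool b) {
      int x;        // note: initialize the variable 'x' to silence this warning
      if (b)        // note: remove the 'if' if its condition is always true
        x = 1;
      return x;     // warning: variable 'x' is used uninitialized whenever
    }               //          'if' condition is false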
1005
1006
/// Diagnose uninitialized const reference usages.
1007
static bool DiagnoseUninitializedConstRefUse(Sema &S, const VarDecl *VD,
1008
0
                                             const UninitUse &Use) {
1009
0
  S.Diag(Use.getUser()->getBeginLoc(), diag::warn_uninit_const_reference)
1010
0
      << VD->getDeclName() << Use.getUser()->getSourceRange();
1011
0
  return true;
1012
0
}
1013
1014
/// DiagnoseUninitializedUse -- Helper function for diagnosing uses of an
1015
/// uninitialized variable. This manages the different forms of diagnostic
1016
/// emitted for particular types of uses. Returns true if the use was diagnosed
1017
/// as a warning. If a particular use is one we omit warnings for, returns
1018
/// false.
1019
static bool DiagnoseUninitializedUse(Sema &S, const VarDecl *VD,
1020
                                     const UninitUse &Use,
1021
0
                                     bool alwaysReportSelfInit = false) {
1022
0
  if (const DeclRefExpr *DRE = dyn_cast<DeclRefExpr>(Use.getUser())) {
1023
    // Inspect the initializer of the variable declaration which is
1024
    // being referenced prior to its initialization. We emit
1025
    // specialized diagnostics for self-initialization, and we
1026
    // specifically avoid warning about self references which take the
1027
    // form of:
1028
    //
1029
    //   int x = x;
1030
    //
1031
    // This is used to indicate to GCC that 'x' is intentionally left
1032
    // uninitialized. Proven code paths which access 'x' in
1033
    // an uninitialized state after this will still warn.
1034
0
    if (const Expr *Initializer = VD->getInit()) {
1035
0
      if (!alwaysReportSelfInit && DRE == Initializer->IgnoreParenImpCasts())
1036
0
        return false;
1037
1038
0
      ContainsReference CR(S.Context, DRE);
1039
0
      CR.Visit(Initializer);
1040
0
      if (CR.doesContainReference()) {
1041
0
        S.Diag(DRE->getBeginLoc(), diag::warn_uninit_self_reference_in_init)
1042
0
            << VD->getDeclName() << VD->getLocation() << DRE->getSourceRange();
1043
0
        return true;
1044
0
      }
1045
0
    }
1046
1047
0
    DiagUninitUse(S, VD, Use, false);
1048
0
  } else {
1049
0
    const BlockExpr *BE = cast<BlockExpr>(Use.getUser());
1050
0
    if (VD->getType()->isBlockPointerType() && !VD->hasAttr<BlocksAttr>())
1051
0
      S.Diag(BE->getBeginLoc(),
1052
0
             diag::warn_uninit_byref_blockvar_captured_by_block)
1053
0
          << VD->getDeclName()
1054
0
          << VD->getType().getQualifiers().hasObjCLifetime();
1055
0
    else
1056
0
      DiagUninitUse(S, VD, Use, true);
1057
0
  }
1058
1059
  // Report where the variable was declared when the use wasn't within
1060
  // the initializer of that declaration & we didn't already suggest
1061
  // an initialization fixit.
1062
0
  if (!SuggestInitializationFixit(S, VD))
1063
0
    S.Diag(VD->getBeginLoc(), diag::note_var_declared_here)
1064
0
        << VD->getDeclName();
1065
1066
0
  return true;
1067
0
}
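Two invented declarations separating the cases handled just above: a bare self-initialization is returned as "not diagnosed" by this helper (the comment above calls it the GCC idiom for intentionally-uninitialized variables), while a self-reference inside a larger initializer is reported:

    void h() {
      int x = x;       // skipped by this helper: treated as intentional self-initialization
      int y = y + 1;   // warning: variable 'y' is uninitialized when used within
    }                  //          its own initialization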
1068
1069
namespace {
1070
  class FallthroughMapper : public RecursiveASTVisitor<FallthroughMapper> {
1071
  public:
1072
    FallthroughMapper(Sema &S)
1073
      : FoundSwitchStatements(false),
1074
0
        S(S) {
1075
0
    }
1076
1077
0
    bool foundSwitchStatements() const { return FoundSwitchStatements; }
1078
1079
0
    void markFallthroughVisited(const AttributedStmt *Stmt) {
1080
0
      bool Found = FallthroughStmts.erase(Stmt);
1081
0
      assert(Found);
1082
0
      (void)Found;
1083
0
    }
1084
1085
    typedef llvm::SmallPtrSet<const AttributedStmt*, 8> AttrStmts;
1086
1087
0
    const AttrStmts &getFallthroughStmts() const {
1088
0
      return FallthroughStmts;
1089
0
    }
1090
1091
0
    void fillReachableBlocks(CFG *Cfg) {
1092
0
      assert(ReachableBlocks.empty() && "ReachableBlocks already filled");
1093
0
      std::deque<const CFGBlock *> BlockQueue;
1094
1095
0
      ReachableBlocks.insert(&Cfg->getEntry());
1096
0
      BlockQueue.push_back(&Cfg->getEntry());
1097
      // Mark all case blocks reachable to avoid problems with switching on
1098
      // constants, covered enums, etc.
1099
      // These blocks can contain fall-through annotations, and we don't want to
1100
      // issue a warn_fallthrough_attr_unreachable for them.
1101
0
      for (const auto *B : *Cfg) {
1102
0
        const Stmt *L = B->getLabel();
1103
0
        if (L && isa<SwitchCase>(L) && ReachableBlocks.insert(B).second)
1104
0
          BlockQueue.push_back(B);
1105
0
      }
1106
1107
0
      while (!BlockQueue.empty()) {
1108
0
        const CFGBlock *P = BlockQueue.front();
1109
0
        BlockQueue.pop_front();
1110
0
        for (const CFGBlock *B : P->succs()) {
1111
0
          if (B && ReachableBlocks.insert(B).second)
1112
0
            BlockQueue.push_back(B);
1113
0
        }
1114
0
      }
1115
0
    }
1116
1117
    bool checkFallThroughIntoBlock(const CFGBlock &B, int &AnnotatedCnt,
1118
0
                                   bool IsTemplateInstantiation) {
1119
0
      assert(!ReachableBlocks.empty() && "ReachableBlocks empty");
1120
1121
0
      int UnannotatedCnt = 0;
1122
0
      AnnotatedCnt = 0;
1123
1124
0
      std::deque<const CFGBlock*> BlockQueue(B.pred_begin(), B.pred_end());
1125
0
      while (!BlockQueue.empty()) {
1126
0
        const CFGBlock *P = BlockQueue.front();
1127
0
        BlockQueue.pop_front();
1128
0
        if (!P) continue;
1129
1130
0
        const Stmt *Term = P->getTerminatorStmt();
1131
0
        if (Term && isa<SwitchStmt>(Term))
1132
0
          continue; // Switch statement, good.
1133
1134
0
        const SwitchCase *SW = dyn_cast_or_null<SwitchCase>(P->getLabel());
1135
0
        if (SW && SW->getSubStmt() == B.getLabel() && P->begin() == P->end())
1136
0
          continue; // Previous case label has no statements, good.
1137
1138
0
        const LabelStmt *L = dyn_cast_or_null<LabelStmt>(P->getLabel());
1139
0
        if (L && L->getSubStmt() == B.getLabel() && P->begin() == P->end())
1140
0
          continue; // Case label is preceded with a normal label, good.
1141
1142
0
        if (!ReachableBlocks.count(P)) {
1143
0
          for (const CFGElement &Elem : llvm::reverse(*P)) {
1144
0
            if (std::optional<CFGStmt> CS = Elem.getAs<CFGStmt>()) {
1145
0
            if (const AttributedStmt *AS = asFallThroughAttr(CS->getStmt())) {
1146
              // Don't issue a warning for an unreachable fallthrough
1147
              // attribute in template instantiations as it may not be
1148
              // unreachable in all instantiations of the template.
1149
0
              if (!IsTemplateInstantiation)
1150
0
                S.Diag(AS->getBeginLoc(),
1151
0
                       diag::warn_unreachable_fallthrough_attr);
1152
0
              markFallthroughVisited(AS);
1153
0
              ++AnnotatedCnt;
1154
0
              break;
1155
0
            }
1156
            // Don't care about other unreachable statements.
1157
0
            }
1158
0
          }
1159
          // If there are no unreachable statements, this may be a special
1160
          // case in CFG:
1161
          // case X: {
1162
          //    A a;  // A has a destructor.
1163
          //    break;
1164
          // }
1165
          // // <<<< This place is represented by a 'hanging' CFG block.
1166
          // case Y:
1167
0
          continue;
1168
0
        }
1169
1170
0
        const Stmt *LastStmt = getLastStmt(*P);
1171
0
        if (const AttributedStmt *AS = asFallThroughAttr(LastStmt)) {
1172
0
          markFallthroughVisited(AS);
1173
0
          ++AnnotatedCnt;
1174
0
          continue; // Fallthrough annotation, good.
1175
0
        }
1176
1177
0
        if (!LastStmt) { // This block contains no executable statements.
1178
          // Traverse its predecessors.
1179
0
          std::copy(P->pred_begin(), P->pred_end(),
1180
0
                    std::back_inserter(BlockQueue));
1181
0
          continue;
1182
0
        }
1183
1184
0
        ++UnannotatedCnt;
1185
0
      }
1186
0
      return !!UnannotatedCnt;
1187
0
    }
1188
1189
    // RecursiveASTVisitor setup.
1190
0
    bool shouldWalkTypesOfTypeLocs() const { return false; }
1191
1192
0
    bool VisitAttributedStmt(AttributedStmt *S) {
1193
0
      if (asFallThroughAttr(S))
1194
0
        FallthroughStmts.insert(S);
1195
0
      return true;
1196
0
    }
1197
1198
0
    bool VisitSwitchStmt(SwitchStmt *S) {
1199
0
      FoundSwitchStatements = true;
1200
0
      return true;
1201
0
    }
1202
1203
    // We don't want to traverse local type declarations. We analyze their
1204
    // methods separately.
1205
0
    bool TraverseDecl(Decl *D) { return true; }
1206
1207
    // We analyze lambda bodies separately. Skip them here.
1208
0
    bool TraverseLambdaExpr(LambdaExpr *LE) {
1209
      // Traverse the captures, but not the body.
1210
0
      for (const auto C : zip(LE->captures(), LE->capture_inits()))
1211
0
        TraverseLambdaCapture(LE, &std::get<0>(C), std::get<1>(C));
1212
0
      return true;
1213
0
    }
1214
1215
  private:
1216
1217
0
    static const AttributedStmt *asFallThroughAttr(const Stmt *S) {
1218
0
      if (const AttributedStmt *AS = dyn_cast_or_null<AttributedStmt>(S)) {
1219
0
        if (hasSpecificAttr<FallThroughAttr>(AS->getAttrs()))
1220
0
          return AS;
1221
0
      }
1222
0
      return nullptr;
1223
0
    }
1224
1225
0
    static const Stmt *getLastStmt(const CFGBlock &B) {
1226
0
      if (const Stmt *Term = B.getTerminatorStmt())
1227
0
        return Term;
1228
0
      for (const CFGElement &Elem : llvm::reverse(B))
1229
0
        if (std::optional<CFGStmt> CS = Elem.getAs<CFGStmt>())
1230
0
          return CS->getStmt();
1231
      // Workaround to detect a statement thrown out by CFGBuilder:
1232
      //   case X: {} case Y:
1233
      //   case X: ; case Y:
1234
0
      if (const SwitchCase *SW = dyn_cast_or_null<SwitchCase>(B.getLabel()))
1235
0
        if (!isa<SwitchCase>(SW->getSubStmt()))
1236
0
          return SW->getSubStmt();
1237
1238
0
      return nullptr;
1239
0
    }
1240
1241
    bool FoundSwitchStatements;
1242
    AttrStmts FallthroughStmts;
1243
    Sema &S;
1244
    llvm::SmallPtrSet<const CFGBlock *, 16> ReachableBlocks;
1245
  };
1246
} // anonymous namespace
1247
1248
static StringRef getFallthroughAttrSpelling(Preprocessor &PP,
1249
0
                                            SourceLocation Loc) {
1250
0
  TokenValue FallthroughTokens[] = {
1251
0
    tok::l_square, tok::l_square,
1252
0
    PP.getIdentifierInfo("fallthrough"),
1253
0
    tok::r_square, tok::r_square
1254
0
  };
1255
1256
0
  TokenValue ClangFallthroughTokens[] = {
1257
0
    tok::l_square, tok::l_square, PP.getIdentifierInfo("clang"),
1258
0
    tok::coloncolon, PP.getIdentifierInfo("fallthrough"),
1259
0
    tok::r_square, tok::r_square
1260
0
  };
1261
1262
0
  bool PreferClangAttr = !PP.getLangOpts().CPlusPlus17 && !PP.getLangOpts().C23;
1263
1264
0
  StringRef MacroName;
1265
0
  if (PreferClangAttr)
1266
0
    MacroName = PP.getLastMacroWithSpelling(Loc, ClangFallthroughTokens);
1267
0
  if (MacroName.empty())
1268
0
    MacroName = PP.getLastMacroWithSpelling(Loc, FallthroughTokens);
1269
0
  if (MacroName.empty() && !PreferClangAttr)
1270
0
    MacroName = PP.getLastMacroWithSpelling(Loc, ClangFallthroughTokens);
1271
0
  if (MacroName.empty()) {
1272
0
    if (!PreferClangAttr)
1273
0
      MacroName = "[[fallthrough]]";
1274
0
    else if (PP.getLangOpts().CPlusPlus)
1275
0
      MacroName = "[[clang::fallthrough]]";
1276
0
    else
1277
0
      MacroName = "__attribute__((fallthrough))";
1278
0
  }
1279
0
  return MacroName;
1280
0
}
1281
1282
static void DiagnoseSwitchLabelsFallthrough(Sema &S, AnalysisDeclContext &AC,
1283
0
                                            bool PerFunction) {
1284
0
  FallthroughMapper FM(S);
1285
0
  FM.TraverseStmt(AC.getBody());
1286
1287
0
  if (!FM.foundSwitchStatements())
1288
0
    return;
1289
1290
0
  if (PerFunction && FM.getFallthroughStmts().empty())
1291
0
    return;
1292
1293
0
  CFG *Cfg = AC.getCFG();
1294
1295
0
  if (!Cfg)
1296
0
    return;
1297
1298
0
  FM.fillReachableBlocks(Cfg);
1299
1300
0
  for (const CFGBlock *B : llvm::reverse(*Cfg)) {
1301
0
    const Stmt *Label = B->getLabel();
1302
1303
0
    if (!isa_and_nonnull<SwitchCase>(Label))
1304
0
      continue;
1305
1306
0
    int AnnotatedCnt;
1307
1308
0
    bool IsTemplateInstantiation = false;
1309
0
    if (const FunctionDecl *Function = dyn_cast<FunctionDecl>(AC.getDecl()))
1310
0
      IsTemplateInstantiation = Function->isTemplateInstantiation();
1311
0
    if (!FM.checkFallThroughIntoBlock(*B, AnnotatedCnt,
1312
0
                                      IsTemplateInstantiation))
1313
0
      continue;
1314
1315
0
    S.Diag(Label->getBeginLoc(),
1316
0
           PerFunction ? diag::warn_unannotated_fallthrough_per_function
1317
0
                       : diag::warn_unannotated_fallthrough);
1318
1319
0
    if (!AnnotatedCnt) {
1320
0
      SourceLocation L = Label->getBeginLoc();
1321
0
      if (L.isMacroID())
1322
0
        continue;
1323
1324
0
      const Stmt *Term = B->getTerminatorStmt();
1325
      // Skip empty cases.
1326
0
      while (B->empty() && !Term && B->succ_size() == 1) {
1327
0
        B = *B->succ_begin();
1328
0
        Term = B->getTerminatorStmt();
1329
0
      }
1330
0
      if (!(B->empty() && Term && isa<BreakStmt>(Term))) {
1331
0
        Preprocessor &PP = S.getPreprocessor();
1332
0
        StringRef AnnotationSpelling = getFallthroughAttrSpelling(PP, L);
1333
0
        SmallString<64> TextToInsert(AnnotationSpelling);
1334
0
        TextToInsert += "; ";
1335
0
        S.Diag(L, diag::note_insert_fallthrough_fixit)
1336
0
            << AnnotationSpelling
1337
0
            << FixItHint::CreateInsertion(L, TextToInsert);
1338
0
      }
1339
0
      S.Diag(L, diag::note_insert_break_fixit)
1340
0
          << FixItHint::CreateInsertion(L, "break; ");
1341
0
    }
1342
0
  }
1343
1344
0
  for (const auto *F : FM.getFallthroughStmts())
1345
0
    S.Diag(F->getBeginLoc(), diag::err_fallthrough_attr_invalid_placement);
1346
0
}
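An invented switch that exercises the diagnostic and fix-its above when -Wimplicit-fallthrough is enabled:

    int grade(int n) {
      switch (n) {
      case 0:
        ++n;          // falls through: the 'case 1' label below gets
      case 1:         // "unannotated fall-through between switch labels", with
        return n;     // fix-its offering the fallthrough annotation or "break; "
      default:
        return 0;
      }
    }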
1347
1348
static bool isInLoop(const ASTContext &Ctx, const ParentMap &PM,
1349
0
                     const Stmt *S) {
1350
0
  assert(S);
1351
1352
0
  do {
1353
0
    switch (S->getStmtClass()) {
1354
0
    case Stmt::ForStmtClass:
1355
0
    case Stmt::WhileStmtClass:
1356
0
    case Stmt::CXXForRangeStmtClass:
1357
0
    case Stmt::ObjCForCollectionStmtClass:
1358
0
      return true;
1359
0
    case Stmt::DoStmtClass: {
1360
0
      Expr::EvalResult Result;
1361
0
      if (!cast<DoStmt>(S)->getCond()->EvaluateAsInt(Result, Ctx))
1362
0
        return true;
1363
0
      return Result.Val.getInt().getBoolValue();
1364
0
    }
1365
0
    default:
1366
0
      break;
1367
0
    }
1368
0
  } while ((S = PM.getParent(S)));
1369
1370
0
  return false;
1371
0
}
1372
1373
static void diagnoseRepeatedUseOfWeak(Sema &S,
1374
                                      const sema::FunctionScopeInfo *CurFn,
1375
                                      const Decl *D,
1376
0
                                      const ParentMap &PM) {
1377
0
  typedef sema::FunctionScopeInfo::WeakObjectProfileTy WeakObjectProfileTy;
1378
0
  typedef sema::FunctionScopeInfo::WeakObjectUseMap WeakObjectUseMap;
1379
0
  typedef sema::FunctionScopeInfo::WeakUseVector WeakUseVector;
1380
0
  typedef std::pair<const Stmt *, WeakObjectUseMap::const_iterator>
1381
0
  StmtUsesPair;
1382
1383
0
  ASTContext &Ctx = S.getASTContext();
1384
1385
0
  const WeakObjectUseMap &WeakMap = CurFn->getWeakObjectUses();
1386
1387
  // Extract all weak objects that are referenced more than once.
1388
0
  SmallVector<StmtUsesPair, 8> UsesByStmt;
1389
0
  for (WeakObjectUseMap::const_iterator I = WeakMap.begin(), E = WeakMap.end();
1390
0
       I != E; ++I) {
1391
0
    const WeakUseVector &Uses = I->second;
1392
1393
    // Find the first read of the weak object.
1394
0
    WeakUseVector::const_iterator UI = Uses.begin(), UE = Uses.end();
1395
0
    for ( ; UI != UE; ++UI) {
1396
0
      if (UI->isUnsafe())
1397
0
        break;
1398
0
    }
1399
1400
    // If there were only writes to this object, don't warn.
1401
0
    if (UI == UE)
1402
0
      continue;
1403
1404
    // If there was only one read, followed by any number of writes, and the
1405
    // read is not within a loop, don't warn. Additionally, don't warn in a
1406
    // loop if the base object is a local variable -- local variables are often
1407
    // changed in loops.
1408
0
    if (UI == Uses.begin()) {
1409
0
      WeakUseVector::const_iterator UI2 = UI;
1410
0
      for (++UI2; UI2 != UE; ++UI2)
1411
0
        if (UI2->isUnsafe())
1412
0
          break;
1413
1414
0
      if (UI2 == UE) {
1415
0
        if (!isInLoop(Ctx, PM, UI->getUseExpr()))
1416
0
          continue;
1417
1418
0
        const WeakObjectProfileTy &Profile = I->first;
1419
0
        if (!Profile.isExactProfile())
1420
0
          continue;
1421
1422
0
        const NamedDecl *Base = Profile.getBase();
1423
0
        if (!Base)
1424
0
          Base = Profile.getProperty();
1425
0
        assert(Base && "A profile always has a base or property.");
1426
1427
0
        if (const VarDecl *BaseVar = dyn_cast<VarDecl>(Base))
1428
0
          if (BaseVar->hasLocalStorage() && !isa<ParmVarDecl>(Base))
1429
0
            continue;
1430
0
      }
1431
0
    }
1432
1433
0
    UsesByStmt.push_back(StmtUsesPair(UI->getUseExpr(), I));
1434
0
  }
1435
1436
0
  if (UsesByStmt.empty())
1437
0
    return;
1438
1439
  // Sort by first use so that we emit the warnings in a deterministic order.
1440
0
  SourceManager &SM = S.getSourceManager();
1441
0
  llvm::sort(UsesByStmt,
1442
0
             [&SM](const StmtUsesPair &LHS, const StmtUsesPair &RHS) {
1443
0
               return SM.isBeforeInTranslationUnit(LHS.first->getBeginLoc(),
1444
0
                                                   RHS.first->getBeginLoc());
1445
0
             });
1446
1447
  // Classify the current code body for better warning text.
1448
  // This enum should stay in sync with the cases in
1449
  // warn_arc_repeated_use_of_weak and warn_arc_possible_repeated_use_of_weak.
1450
  // FIXME: Should we use a common classification enum and the same set of
1451
  // possibilities all throughout Sema?
1452
0
  enum {
1453
0
    Function,
1454
0
    Method,
1455
0
    Block,
1456
0
    Lambda
1457
0
  } FunctionKind;
1458
1459
0
  if (isa<sema::BlockScopeInfo>(CurFn))
1460
0
    FunctionKind = Block;
1461
0
  else if (isa<sema::LambdaScopeInfo>(CurFn))
1462
0
    FunctionKind = Lambda;
1463
0
  else if (isa<ObjCMethodDecl>(D))
1464
0
    FunctionKind = Method;
1465
0
  else
1466
0
    FunctionKind = Function;
1467
1468
  // Iterate through the sorted problems and emit warnings for each.
1469
0
  for (const auto &P : UsesByStmt) {
1470
0
    const Stmt *FirstRead = P.first;
1471
0
    const WeakObjectProfileTy &Key = P.second->first;
1472
0
    const WeakUseVector &Uses = P.second->second;
1473
1474
    // For complicated expressions like 'a.b.c' and 'x.b.c', WeakObjectProfileTy
1475
    // may not contain enough information to determine that these are different
1476
    // properties. We can only be 100% sure of a repeated use in certain cases,
1477
    // and we adjust the diagnostic kind accordingly so that the less certain
1478
    // case can be turned off if it is too noisy.
1479
0
    unsigned DiagKind;
1480
0
    if (Key.isExactProfile())
1481
0
      DiagKind = diag::warn_arc_repeated_use_of_weak;
1482
0
    else
1483
0
      DiagKind = diag::warn_arc_possible_repeated_use_of_weak;
1484
1485
    // Classify the weak object being accessed for better warning text.
1486
    // This enum should stay in sync with the cases in
1487
    // warn_arc_repeated_use_of_weak and warn_arc_possible_repeated_use_of_weak.
1488
0
    enum {
1489
0
      Variable,
1490
0
      Property,
1491
0
      ImplicitProperty,
1492
0
      Ivar
1493
0
    } ObjectKind;
1494
1495
0
    const NamedDecl *KeyProp = Key.getProperty();
1496
0
    if (isa<VarDecl>(KeyProp))
1497
0
      ObjectKind = Variable;
1498
0
    else if (isa<ObjCPropertyDecl>(KeyProp))
1499
0
      ObjectKind = Property;
1500
0
    else if (isa<ObjCMethodDecl>(KeyProp))
1501
0
      ObjectKind = ImplicitProperty;
1502
0
    else if (isa<ObjCIvarDecl>(KeyProp))
1503
0
      ObjectKind = Ivar;
1504
0
    else
1505
0
      llvm_unreachable("Unexpected weak object kind!");
1506
1507
    // Do not warn about IBOutlet weak property receivers being set to null
1508
    // since they are typically only used from the main thread.
1509
0
    if (const ObjCPropertyDecl *Prop = dyn_cast<ObjCPropertyDecl>(KeyProp))
1510
0
      if (Prop->hasAttr<IBOutletAttr>())
1511
0
        continue;
1512
1513
    // Show the first time the object was read.
1514
0
    S.Diag(FirstRead->getBeginLoc(), DiagKind)
1515
0
        << int(ObjectKind) << KeyProp << int(FunctionKind)
1516
0
        << FirstRead->getSourceRange();
1517
1518
    // Print all the other accesses as notes.
1519
0
    for (const auto &Use : Uses) {
1520
0
      if (Use.getUseExpr() == FirstRead)
1521
0
        continue;
1522
0
      S.Diag(Use.getUseExpr()->getBeginLoc(),
1523
0
             diag::note_arc_weak_also_accessed_here)
1524
0
          << Use.getUseExpr()->getSourceRange();
1525
0
    }
1526
0
  }
1527
0
}
1528
1529
namespace clang {
1530
namespace {
1531
typedef SmallVector<PartialDiagnosticAt, 1> OptionalNotes;
1532
typedef std::pair<PartialDiagnosticAt, OptionalNotes> DelayedDiag;
1533
typedef std::list<DelayedDiag> DiagList;
1534
1535
struct SortDiagBySourceLocation {
1536
  SourceManager &SM;
1537
0
  SortDiagBySourceLocation(SourceManager &SM) : SM(SM) {}
1538
1539
0
  bool operator()(const DelayedDiag &left, const DelayedDiag &right) {
1540
    // Although this call will be slow, this is only called when outputting
1541
    // multiple warnings.
1542
0
    return SM.isBeforeInTranslationUnit(left.first.first, right.first.first);
1543
0
  }
1544
};
1545
} // anonymous namespace
1546
} // namespace clang
1547
1548
namespace {
1549
class UninitValsDiagReporter : public UninitVariablesHandler {
1550
  Sema &S;
1551
  typedef SmallVector<UninitUse, 2> UsesVec;
1552
  typedef llvm::PointerIntPair<UsesVec *, 1, bool> MappedType;
1553
  // Prefer using MapVector to DenseMap, so that iteration order will be
1554
  // the same as insertion order. This is needed to obtain a deterministic
1555
  // order of diagnostics when calling flushDiagnostics().
1556
  typedef llvm::MapVector<const VarDecl *, MappedType> UsesMap;
1557
  UsesMap uses;
1558
  UsesMap constRefUses;
1559
1560
public:
1561
0
  UninitValsDiagReporter(Sema &S) : S(S) {}
1562
0
  ~UninitValsDiagReporter() override { flushDiagnostics(); }
1563
1564
0
  MappedType &getUses(UsesMap &um, const VarDecl *vd) {
1565
0
    MappedType &V = um[vd];
1566
0
    if (!V.getPointer())
1567
0
      V.setPointer(new UsesVec());
1568
0
    return V;
1569
0
  }
1570
1571
  void handleUseOfUninitVariable(const VarDecl *vd,
1572
0
                                 const UninitUse &use) override {
1573
0
    getUses(uses, vd).getPointer()->push_back(use);
1574
0
  }
1575
1576
  void handleConstRefUseOfUninitVariable(const VarDecl *vd,
1577
0
                                         const UninitUse &use) override {
1578
0
    getUses(constRefUses, vd).getPointer()->push_back(use);
1579
0
  }
1580
1581
0
  void handleSelfInit(const VarDecl *vd) override {
1582
0
    getUses(uses, vd).setInt(true);
1583
0
    getUses(constRefUses, vd).setInt(true);
1584
0
  }
1585
1586
0
  void flushDiagnostics() {
1587
0
    for (const auto &P : uses) {
1588
0
      const VarDecl *vd = P.first;
1589
0
      const MappedType &V = P.second;
1590
1591
0
      UsesVec *vec = V.getPointer();
1592
0
      bool hasSelfInit = V.getInt();
1593
1594
      // Specially handle the case where we have uses of an uninitialized
1595
      // variable, but the root cause is an idiomatic self-init.  We want
1596
      // to report the diagnostic at the self-init since that is the root cause.
1597
0
      if (!vec->empty() && hasSelfInit && hasAlwaysUninitializedUse(vec))
1598
0
        DiagnoseUninitializedUse(S, vd,
1599
0
                                 UninitUse(vd->getInit()->IgnoreParenCasts(),
1600
0
                                           /* isAlwaysUninit */ true),
1601
0
                                 /* alwaysReportSelfInit */ true);
1602
0
      else {
1603
        // Sort the uses by their SourceLocations.  While not strictly
1604
        // guaranteed to produce them in line/column order, this will provide
1605
        // a stable ordering.
1606
0
        llvm::sort(*vec, [](const UninitUse &a, const UninitUse &b) {
1607
          // Prefer a more confident report over a less confident one.
1608
0
          if (a.getKind() != b.getKind())
1609
0
            return a.getKind() > b.getKind();
1610
0
          return a.getUser()->getBeginLoc() < b.getUser()->getBeginLoc();
1611
0
        });
1612
1613
0
        for (const auto &U : *vec) {
1614
          // If we have self-init, downgrade all uses to 'may be uninitialized'.
1615
0
          UninitUse Use = hasSelfInit ? UninitUse(U.getUser(), false) : U;
1616
1617
0
          if (DiagnoseUninitializedUse(S, vd, Use))
1618
            // Skip further diagnostics for this variable. We try to warn only
1619
            // on the first point at which a variable is used uninitialized.
1620
0
            break;
1621
0
        }
1622
0
      }
1623
1624
      // Release the uses vector.
1625
0
      delete vec;
1626
0
    }
1627
1628
0
    uses.clear();
1629
1630
    // Flush all const reference uses diags.
1631
0
    for (const auto &P : constRefUses) {
1632
0
      const VarDecl *vd = P.first;
1633
0
      const MappedType &V = P.second;
1634
1635
0
      UsesVec *vec = V.getPointer();
1636
0
      bool hasSelfInit = V.getInt();
1637
1638
0
      if (!vec->empty() && hasSelfInit && hasAlwaysUninitializedUse(vec))
1639
0
        DiagnoseUninitializedUse(S, vd,
1640
0
                                 UninitUse(vd->getInit()->IgnoreParenCasts(),
1641
0
                                           /* isAlwaysUninit */ true),
1642
0
                                 /* alwaysReportSelfInit */ true);
1643
0
      else {
1644
0
        for (const auto &U : *vec) {
1645
0
          if (DiagnoseUninitializedConstRefUse(S, vd, U))
1646
0
            break;
1647
0
        }
1648
0
      }
1649
1650
      // Release the uses vector.
1651
0
      delete vec;
1652
0
    }
1653
1654
0
    constRefUses.clear();
1655
0
  }
1656
1657
private:
1658
0
  static bool hasAlwaysUninitializedUse(const UsesVec* vec) {
1659
0
    return llvm::any_of(*vec, [](const UninitUse &U) {
1660
0
      return U.getKind() == UninitUse::Always ||
1661
0
             U.getKind() == UninitUse::AfterCall ||
1662
0
             U.getKind() == UninitUse::AfterDecl;
1663
0
    });
1664
0
  }
1665
};
1666
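UninitValsDiagReporter buffers uses of uninitialized variables and, when the variable was "initialized" from itself, anchors the report at the self-initialization instead of at each later use. A minimal sketch of the two shapes involved, assuming -Wuninitialized is enabled (illustrative example, not part of the source above):

int use(int);

int demo() {
  int x;
  int y = use(x);   // 'x' is uninitialized when used here

  int z = z;        // idiomatic self-initialization; the diagnostic is
                    // reported here rather than at the later read of 'z'
  return y + use(z);
}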
1667
/// Inter-procedural data for the called-once checker.
1668
class CalledOnceInterProceduralData {
1669
public:
1670
  // Add the delayed warning for the given block.
1671
  void addDelayedWarning(const BlockDecl *Block,
1672
0
                         PartialDiagnosticAt &&Warning) {
1673
0
    DelayedBlockWarnings[Block].emplace_back(std::move(Warning));
1674
0
  }
1675
  // Report all of the warnings we've gathered for the given block.
1676
0
  void flushWarnings(const BlockDecl *Block, Sema &S) {
1677
0
    for (const PartialDiagnosticAt &Delayed : DelayedBlockWarnings[Block])
1678
0
      S.Diag(Delayed.first, Delayed.second);
1679
1680
0
    discardWarnings(Block);
1681
0
  }
1682
  // Discard all of the warnings we've gathered for the given block.
1683
0
  void discardWarnings(const BlockDecl *Block) {
1684
0
    DelayedBlockWarnings.erase(Block);
1685
0
  }
1686
1687
private:
1688
  using DelayedDiagnostics = SmallVector<PartialDiagnosticAt, 2>;
1689
  llvm::DenseMap<const BlockDecl *, DelayedDiagnostics> DelayedBlockWarnings;
1690
};
1691
1692
class CalledOnceCheckReporter : public CalledOnceCheckHandler {
1693
public:
1694
  CalledOnceCheckReporter(Sema &S, CalledOnceInterProceduralData &Data)
1695
0
      : S(S), Data(Data) {}
1696
  void handleDoubleCall(const ParmVarDecl *Parameter, const Expr *Call,
1697
                        const Expr *PrevCall, bool IsCompletionHandler,
1698
0
                        bool Poised) override {
1699
0
    auto DiagToReport = IsCompletionHandler
1700
0
                            ? diag::warn_completion_handler_called_twice
1701
0
                            : diag::warn_called_once_gets_called_twice;
1702
0
    S.Diag(Call->getBeginLoc(), DiagToReport) << Parameter;
1703
0
    S.Diag(PrevCall->getBeginLoc(), diag::note_called_once_gets_called_twice)
1704
0
        << Poised;
1705
0
  }
1706
1707
  void handleNeverCalled(const ParmVarDecl *Parameter,
1708
0
                         bool IsCompletionHandler) override {
1709
0
    auto DiagToReport = IsCompletionHandler
1710
0
                            ? diag::warn_completion_handler_never_called
1711
0
                            : diag::warn_called_once_never_called;
1712
0
    S.Diag(Parameter->getBeginLoc(), DiagToReport)
1713
0
        << Parameter << /* Captured */ false;
1714
0
  }
1715
1716
  void handleNeverCalled(const ParmVarDecl *Parameter, const Decl *Function,
1717
                         const Stmt *Where, NeverCalledReason Reason,
1718
                         bool IsCalledDirectly,
1719
0
                         bool IsCompletionHandler) override {
1720
0
    auto DiagToReport = IsCompletionHandler
1721
0
                            ? diag::warn_completion_handler_never_called_when
1722
0
                            : diag::warn_called_once_never_called_when;
1723
0
    PartialDiagnosticAt Warning(Where->getBeginLoc(), S.PDiag(DiagToReport)
1724
0
                                                          << Parameter
1725
0
                                                          << IsCalledDirectly
1726
0
                                                          << (unsigned)Reason);
1727
1728
0
    if (const auto *Block = dyn_cast<BlockDecl>(Function)) {
1729
      // We shouldn't report these warnings on blocks immediately
1730
0
      Data.addDelayedWarning(Block, std::move(Warning));
1731
0
    } else {
1732
0
      S.Diag(Warning.first, Warning.second);
1733
0
    }
1734
0
  }
1735
1736
  void handleCapturedNeverCalled(const ParmVarDecl *Parameter,
1737
                                 const Decl *Where,
1738
0
                                 bool IsCompletionHandler) override {
1739
0
    auto DiagToReport = IsCompletionHandler
1740
0
                            ? diag::warn_completion_handler_never_called
1741
0
                            : diag::warn_called_once_never_called;
1742
0
    S.Diag(Where->getBeginLoc(), DiagToReport)
1743
0
        << Parameter << /* Captured */ true;
1744
0
  }
1745
1746
  void
1747
0
  handleBlockThatIsGuaranteedToBeCalledOnce(const BlockDecl *Block) override {
1748
0
    Data.flushWarnings(Block, S);
1749
0
  }
1750
1751
0
  void handleBlockWithNoGuarantees(const BlockDecl *Block) override {
1752
0
    Data.discardWarnings(Block);
1753
0
  }
1754
1755
private:
1756
  Sema &S;
1757
  CalledOnceInterProceduralData &Data;
1758
};
1759
1760
constexpr unsigned CalledOnceWarnings[] = {
1761
    diag::warn_called_once_never_called,
1762
    diag::warn_called_once_never_called_when,
1763
    diag::warn_called_once_gets_called_twice};
1764
1765
constexpr unsigned CompletionHandlerWarnings[]{
1766
    diag::warn_completion_handler_never_called,
1767
    diag::warn_completion_handler_never_called_when,
1768
    diag::warn_completion_handler_called_twice};
1769
1770
bool shouldAnalyzeCalledOnceImpl(llvm::ArrayRef<unsigned> DiagIDs,
1771
                                 const DiagnosticsEngine &Diags,
1772
0
                                 SourceLocation At) {
1773
0
  return llvm::any_of(DiagIDs, [&Diags, At](unsigned DiagID) {
1774
0
    return !Diags.isIgnored(DiagID, At);
1775
0
  });
1776
0
}
1777
1778
bool shouldAnalyzeCalledOnceConventions(const DiagnosticsEngine &Diags,
1779
0
                                        SourceLocation At) {
1780
0
  return shouldAnalyzeCalledOnceImpl(CompletionHandlerWarnings, Diags, At);
1781
0
}
1782
1783
bool shouldAnalyzeCalledOnceParameters(const DiagnosticsEngine &Diags,
1784
0
                                       SourceLocation At) {
1785
0
  return shouldAnalyzeCalledOnceImpl(CalledOnceWarnings, Diags, At) ||
1786
0
         shouldAnalyzeCalledOnceConventions(Diags, At);
1787
0
}
1788
} // anonymous namespace
1789
1790
//===----------------------------------------------------------------------===//
1791
// -Wthread-safety
1792
//===----------------------------------------------------------------------===//
1793
namespace clang {
1794
namespace threadSafety {
1795
namespace {
1796
class ThreadSafetyReporter : public clang::threadSafety::ThreadSafetyHandler {
1797
  Sema &S;
1798
  DiagList Warnings;
1799
  SourceLocation FunLocation, FunEndLocation;
1800
1801
  const FunctionDecl *CurrentFunction;
1802
  bool Verbose;
1803
1804
0
  OptionalNotes getNotes() const {
1805
0
    if (Verbose && CurrentFunction) {
1806
0
      PartialDiagnosticAt FNote(CurrentFunction->getBody()->getBeginLoc(),
1807
0
                                S.PDiag(diag::note_thread_warning_in_fun)
1808
0
                                    << CurrentFunction);
1809
0
      return OptionalNotes(1, FNote);
1810
0
    }
1811
0
    return OptionalNotes();
1812
0
  }
1813
1814
0
  OptionalNotes getNotes(const PartialDiagnosticAt &Note) const {
1815
0
    OptionalNotes ONS(1, Note);
1816
0
    if (Verbose && CurrentFunction) {
1817
0
      PartialDiagnosticAt FNote(CurrentFunction->getBody()->getBeginLoc(),
1818
0
                                S.PDiag(diag::note_thread_warning_in_fun)
1819
0
                                    << CurrentFunction);
1820
0
      ONS.push_back(std::move(FNote));
1821
0
    }
1822
0
    return ONS;
1823
0
  }
1824
1825
  OptionalNotes getNotes(const PartialDiagnosticAt &Note1,
1826
0
                         const PartialDiagnosticAt &Note2) const {
1827
0
    OptionalNotes ONS;
1828
0
    ONS.push_back(Note1);
1829
0
    ONS.push_back(Note2);
1830
0
    if (Verbose && CurrentFunction) {
1831
0
      PartialDiagnosticAt FNote(CurrentFunction->getBody()->getBeginLoc(),
1832
0
                                S.PDiag(diag::note_thread_warning_in_fun)
1833
0
                                    << CurrentFunction);
1834
0
      ONS.push_back(std::move(FNote));
1835
0
    }
1836
0
    return ONS;
1837
0
  }
1838
1839
0
  OptionalNotes makeLockedHereNote(SourceLocation LocLocked, StringRef Kind) {
1840
0
    return LocLocked.isValid()
1841
0
               ? getNotes(PartialDiagnosticAt(
1842
0
                     LocLocked, S.PDiag(diag::note_locked_here) << Kind))
1843
0
               : getNotes();
1844
0
  }
1845
1846
  OptionalNotes makeUnlockedHereNote(SourceLocation LocUnlocked,
1847
0
                                     StringRef Kind) {
1848
0
    return LocUnlocked.isValid()
1849
0
               ? getNotes(PartialDiagnosticAt(
1850
0
                     LocUnlocked, S.PDiag(diag::note_unlocked_here) << Kind))
1851
0
               : getNotes();
1852
0
  }
1853
1854
 public:
1855
  ThreadSafetyReporter(Sema &S, SourceLocation FL, SourceLocation FEL)
1856
    : S(S), FunLocation(FL), FunEndLocation(FEL),
1857
0
      CurrentFunction(nullptr), Verbose(false) {}
1858
1859
0
  void setVerbose(bool b) { Verbose = b; }
1860
1861
  /// Emit all buffered diagnostics in order of source location.
1862
  /// We need to output diagnostics produced while iterating through
1863
  /// the lockset in deterministic order, so this function orders diagnostics
1864
  /// and outputs them.
1865
0
  void emitDiagnostics() {
1866
0
    Warnings.sort(SortDiagBySourceLocation(S.getSourceManager()));
1867
0
    for (const auto &Diag : Warnings) {
1868
0
      S.Diag(Diag.first.first, Diag.first.second);
1869
0
      for (const auto &Note : Diag.second)
1870
0
        S.Diag(Note.first, Note.second);
1871
0
    }
1872
0
  }
1873
1874
0
  void handleInvalidLockExp(SourceLocation Loc) override {
1875
0
    PartialDiagnosticAt Warning(Loc, S.PDiag(diag::warn_cannot_resolve_lock)
1876
0
                                         << Loc);
1877
0
    Warnings.emplace_back(std::move(Warning), getNotes());
1878
0
  }
1879
1880
  void handleUnmatchedUnlock(StringRef Kind, Name LockName, SourceLocation Loc,
1881
0
                             SourceLocation LocPreviousUnlock) override {
1882
0
    if (Loc.isInvalid())
1883
0
      Loc = FunLocation;
1884
0
    PartialDiagnosticAt Warning(Loc, S.PDiag(diag::warn_unlock_but_no_lock)
1885
0
                                         << Kind << LockName);
1886
0
    Warnings.emplace_back(std::move(Warning),
1887
0
                          makeUnlockedHereNote(LocPreviousUnlock, Kind));
1888
0
  }
1889
1890
  void handleIncorrectUnlockKind(StringRef Kind, Name LockName,
1891
                                 LockKind Expected, LockKind Received,
1892
                                 SourceLocation LocLocked,
1893
0
                                 SourceLocation LocUnlock) override {
1894
0
    if (LocUnlock.isInvalid())
1895
0
      LocUnlock = FunLocation;
1896
0
    PartialDiagnosticAt Warning(
1897
0
        LocUnlock, S.PDiag(diag::warn_unlock_kind_mismatch)
1898
0
                       << Kind << LockName << Received << Expected);
1899
0
    Warnings.emplace_back(std::move(Warning),
1900
0
                          makeLockedHereNote(LocLocked, Kind));
1901
0
  }
1902
1903
  void handleDoubleLock(StringRef Kind, Name LockName, SourceLocation LocLocked,
1904
0
                        SourceLocation LocDoubleLock) override {
1905
0
    if (LocDoubleLock.isInvalid())
1906
0
      LocDoubleLock = FunLocation;
1907
0
    PartialDiagnosticAt Warning(LocDoubleLock, S.PDiag(diag::warn_double_lock)
1908
0
                                                   << Kind << LockName);
1909
0
    Warnings.emplace_back(std::move(Warning),
1910
0
                          makeLockedHereNote(LocLocked, Kind));
1911
0
  }
1912
1913
  void handleMutexHeldEndOfScope(StringRef Kind, Name LockName,
1914
                                 SourceLocation LocLocked,
1915
                                 SourceLocation LocEndOfScope,
1916
0
                                 LockErrorKind LEK) override {
1917
0
    unsigned DiagID = 0;
1918
0
    switch (LEK) {
1919
0
      case LEK_LockedSomePredecessors:
1920
0
        DiagID = diag::warn_lock_some_predecessors;
1921
0
        break;
1922
0
      case LEK_LockedSomeLoopIterations:
1923
0
        DiagID = diag::warn_expecting_lock_held_on_loop;
1924
0
        break;
1925
0
      case LEK_LockedAtEndOfFunction:
1926
0
        DiagID = diag::warn_no_unlock;
1927
0
        break;
1928
0
      case LEK_NotLockedAtEndOfFunction:
1929
0
        DiagID = diag::warn_expecting_locked;
1930
0
        break;
1931
0
    }
1932
0
    if (LocEndOfScope.isInvalid())
1933
0
      LocEndOfScope = FunEndLocation;
1934
1935
0
    PartialDiagnosticAt Warning(LocEndOfScope, S.PDiag(DiagID) << Kind
1936
0
                                                               << LockName);
1937
0
    Warnings.emplace_back(std::move(Warning),
1938
0
                          makeLockedHereNote(LocLocked, Kind));
1939
0
  }
1940
1941
  void handleExclusiveAndShared(StringRef Kind, Name LockName,
1942
                                SourceLocation Loc1,
1943
0
                                SourceLocation Loc2) override {
1944
0
    PartialDiagnosticAt Warning(Loc1,
1945
0
                                S.PDiag(diag::warn_lock_exclusive_and_shared)
1946
0
                                    << Kind << LockName);
1947
0
    PartialDiagnosticAt Note(Loc2, S.PDiag(diag::note_lock_exclusive_and_shared)
1948
0
                                       << Kind << LockName);
1949
0
    Warnings.emplace_back(std::move(Warning), getNotes(Note));
1950
0
  }
1951
1952
  void handleNoMutexHeld(const NamedDecl *D, ProtectedOperationKind POK,
1953
0
                         AccessKind AK, SourceLocation Loc) override {
1954
0
    assert((POK == POK_VarAccess || POK == POK_VarDereference) &&
1955
0
           "Only works for variables");
1956
0
    unsigned DiagID = POK == POK_VarAccess?
1957
0
                        diag::warn_variable_requires_any_lock:
1958
0
                        diag::warn_var_deref_requires_any_lock;
1959
0
    PartialDiagnosticAt Warning(Loc, S.PDiag(DiagID)
1960
0
      << D << getLockKindFromAccessKind(AK));
1961
0
    Warnings.emplace_back(std::move(Warning), getNotes());
1962
0
  }
1963
1964
  void handleMutexNotHeld(StringRef Kind, const NamedDecl *D,
1965
                          ProtectedOperationKind POK, Name LockName,
1966
                          LockKind LK, SourceLocation Loc,
1967
0
                          Name *PossibleMatch) override {
1968
0
    unsigned DiagID = 0;
1969
0
    if (PossibleMatch) {
1970
0
      switch (POK) {
1971
0
        case POK_VarAccess:
1972
0
          DiagID = diag::warn_variable_requires_lock_precise;
1973
0
          break;
1974
0
        case POK_VarDereference:
1975
0
          DiagID = diag::warn_var_deref_requires_lock_precise;
1976
0
          break;
1977
0
        case POK_FunctionCall:
1978
0
          DiagID = diag::warn_fun_requires_lock_precise;
1979
0
          break;
1980
0
        case POK_PassByRef:
1981
0
          DiagID = diag::warn_guarded_pass_by_reference;
1982
0
          break;
1983
0
        case POK_PtPassByRef:
1984
0
          DiagID = diag::warn_pt_guarded_pass_by_reference;
1985
0
          break;
1986
0
        case POK_ReturnByRef:
1987
0
          DiagID = diag::warn_guarded_return_by_reference;
1988
0
          break;
1989
0
        case POK_PtReturnByRef:
1990
0
          DiagID = diag::warn_pt_guarded_return_by_reference;
1991
0
          break;
1992
0
      }
1993
0
      PartialDiagnosticAt Warning(Loc, S.PDiag(DiagID) << Kind
1994
0
                                                       << D
1995
0
                                                       << LockName << LK);
1996
0
      PartialDiagnosticAt Note(Loc, S.PDiag(diag::note_found_mutex_near_match)
1997
0
                                        << *PossibleMatch);
1998
0
      if (Verbose && POK == POK_VarAccess) {
1999
0
        PartialDiagnosticAt VNote(D->getLocation(),
2000
0
                                  S.PDiag(diag::note_guarded_by_declared_here)
2001
0
                                      << D->getDeclName());
2002
0
        Warnings.emplace_back(std::move(Warning), getNotes(Note, VNote));
2003
0
      } else
2004
0
        Warnings.emplace_back(std::move(Warning), getNotes(Note));
2005
0
    } else {
2006
0
      switch (POK) {
2007
0
        case POK_VarAccess:
2008
0
          DiagID = diag::warn_variable_requires_lock;
2009
0
          break;
2010
0
        case POK_VarDereference:
2011
0
          DiagID = diag::warn_var_deref_requires_lock;
2012
0
          break;
2013
0
        case POK_FunctionCall:
2014
0
          DiagID = diag::warn_fun_requires_lock;
2015
0
          break;
2016
0
        case POK_PassByRef:
2017
0
          DiagID = diag::warn_guarded_pass_by_reference;
2018
0
          break;
2019
0
        case POK_PtPassByRef:
2020
0
          DiagID = diag::warn_pt_guarded_pass_by_reference;
2021
0
          break;
2022
0
        case POK_ReturnByRef:
2023
0
          DiagID = diag::warn_guarded_return_by_reference;
2024
0
          break;
2025
0
        case POK_PtReturnByRef:
2026
0
          DiagID = diag::warn_pt_guarded_return_by_reference;
2027
0
          break;
2028
0
      }
2029
0
      PartialDiagnosticAt Warning(Loc, S.PDiag(DiagID) << Kind
2030
0
                                                       << D
2031
0
                                                       << LockName << LK);
2032
0
      if (Verbose && POK == POK_VarAccess) {
2033
0
        PartialDiagnosticAt Note(D->getLocation(),
2034
0
                                 S.PDiag(diag::note_guarded_by_declared_here));
2035
0
        Warnings.emplace_back(std::move(Warning), getNotes(Note));
2036
0
      } else
2037
0
        Warnings.emplace_back(std::move(Warning), getNotes());
2038
0
    }
2039
0
  }
2040
2041
  void handleNegativeNotHeld(StringRef Kind, Name LockName, Name Neg,
2042
0
                             SourceLocation Loc) override {
2043
0
    PartialDiagnosticAt Warning(Loc,
2044
0
        S.PDiag(diag::warn_acquire_requires_negative_cap)
2045
0
        << Kind << LockName << Neg);
2046
0
    Warnings.emplace_back(std::move(Warning), getNotes());
2047
0
  }
2048
2049
  void handleNegativeNotHeld(const NamedDecl *D, Name LockName,
2050
0
                             SourceLocation Loc) override {
2051
0
    PartialDiagnosticAt Warning(
2052
0
        Loc, S.PDiag(diag::warn_fun_requires_negative_cap) << D << LockName);
2053
0
    Warnings.emplace_back(std::move(Warning), getNotes());
2054
0
  }
2055
2056
  void handleFunExcludesLock(StringRef Kind, Name FunName, Name LockName,
2057
0
                             SourceLocation Loc) override {
2058
0
    PartialDiagnosticAt Warning(Loc, S.PDiag(diag::warn_fun_excludes_mutex)
2059
0
                                         << Kind << FunName << LockName);
2060
0
    Warnings.emplace_back(std::move(Warning), getNotes());
2061
0
  }
2062
2063
  void handleLockAcquiredBefore(StringRef Kind, Name L1Name, Name L2Name,
2064
0
                                SourceLocation Loc) override {
2065
0
    PartialDiagnosticAt Warning(Loc,
2066
0
      S.PDiag(diag::warn_acquired_before) << Kind << L1Name << L2Name);
2067
0
    Warnings.emplace_back(std::move(Warning), getNotes());
2068
0
  }
2069
2070
0
  void handleBeforeAfterCycle(Name L1Name, SourceLocation Loc) override {
2071
0
    PartialDiagnosticAt Warning(Loc,
2072
0
      S.PDiag(diag::warn_acquired_before_after_cycle) << L1Name);
2073
0
    Warnings.emplace_back(std::move(Warning), getNotes());
2074
0
  }
2075
2076
0
  void enterFunction(const FunctionDecl* FD) override {
2077
0
    CurrentFunction = FD;
2078
0
  }
2079
2080
0
  void leaveFunction(const FunctionDecl* FD) override {
2081
0
    CurrentFunction = nullptr;
2082
0
  }
2083
};
2084
} // anonymous namespace
2085
} // namespace threadSafety
2086
} // namespace clang
2087
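ThreadSafetyReporter buffers the -Wthread-safety diagnostics and emits them in source order. A minimal sketch of annotated code that produces two of the cases handled above, a guarded access without the lock held and a double lock; the capability attributes below follow Clang's thread-safety annotations, and the comments only paraphrase the expected diagnostics (illustrative example):

class __attribute__((capability("mutex"))) Mutex {
public:
  void lock()   __attribute__((acquire_capability()));
  void unlock() __attribute__((release_capability()));
};

Mutex mu;
int counter __attribute__((guarded_by(mu)));

void bump() {
  counter++;          // writing 'counter' requires holding mutex 'mu'
  mu.lock();
  mu.lock();          // acquiring mutex 'mu' that is already held
  counter++;          // ok: 'mu' is held here
  mu.unlock();
}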
2088
//===----------------------------------------------------------------------===//
2089
// -Wconsumed
2090
//===----------------------------------------------------------------------===//
2091
2092
namespace clang {
2093
namespace consumed {
2094
namespace {
2095
class ConsumedWarningsHandler : public ConsumedWarningsHandlerBase {
2096
2097
  Sema &S;
2098
  DiagList Warnings;
2099
2100
public:
2101
2102
0
  ConsumedWarningsHandler(Sema &S) : S(S) {}
2103
2104
0
  void emitDiagnostics() override {
2105
0
    Warnings.sort(SortDiagBySourceLocation(S.getSourceManager()));
2106
0
    for (const auto &Diag : Warnings) {
2107
0
      S.Diag(Diag.first.first, Diag.first.second);
2108
0
      for (const auto &Note : Diag.second)
2109
0
        S.Diag(Note.first, Note.second);
2110
0
    }
2111
0
  }
2112
2113
  void warnLoopStateMismatch(SourceLocation Loc,
2114
0
                             StringRef VariableName) override {
2115
0
    PartialDiagnosticAt Warning(Loc, S.PDiag(diag::warn_loop_state_mismatch) <<
2116
0
      VariableName);
2117
2118
0
    Warnings.emplace_back(std::move(Warning), OptionalNotes());
2119
0
  }
2120
2121
  void warnParamReturnTypestateMismatch(SourceLocation Loc,
2122
                                        StringRef VariableName,
2123
                                        StringRef ExpectedState,
2124
0
                                        StringRef ObservedState) override {
2125
2126
0
    PartialDiagnosticAt Warning(Loc, S.PDiag(
2127
0
      diag::warn_param_return_typestate_mismatch) << VariableName <<
2128
0
        ExpectedState << ObservedState);
2129
2130
0
    Warnings.emplace_back(std::move(Warning), OptionalNotes());
2131
0
  }
2132
2133
  void warnParamTypestateMismatch(SourceLocation Loc, StringRef ExpectedState,
2134
0
                                  StringRef ObservedState) override {
2135
2136
0
    PartialDiagnosticAt Warning(Loc, S.PDiag(
2137
0
      diag::warn_param_typestate_mismatch) << ExpectedState << ObservedState);
2138
2139
0
    Warnings.emplace_back(std::move(Warning), OptionalNotes());
2140
0
  }
2141
2142
  void warnReturnTypestateForUnconsumableType(SourceLocation Loc,
2143
0
                                              StringRef TypeName) override {
2144
0
    PartialDiagnosticAt Warning(Loc, S.PDiag(
2145
0
      diag::warn_return_typestate_for_unconsumable_type) << TypeName);
2146
2147
0
    Warnings.emplace_back(std::move(Warning), OptionalNotes());
2148
0
  }
2149
2150
  void warnReturnTypestateMismatch(SourceLocation Loc, StringRef ExpectedState,
2151
0
                                   StringRef ObservedState) override {
2152
2153
0
    PartialDiagnosticAt Warning(Loc, S.PDiag(
2154
0
      diag::warn_return_typestate_mismatch) << ExpectedState << ObservedState);
2155
2156
0
    Warnings.emplace_back(std::move(Warning), OptionalNotes());
2157
0
  }
2158
2159
  void warnUseOfTempInInvalidState(StringRef MethodName, StringRef State,
2160
0
                                   SourceLocation Loc) override {
2161
2162
0
    PartialDiagnosticAt Warning(Loc, S.PDiag(
2163
0
      diag::warn_use_of_temp_in_invalid_state) << MethodName << State);
2164
2165
0
    Warnings.emplace_back(std::move(Warning), OptionalNotes());
2166
0
  }
2167
2168
  void warnUseInInvalidState(StringRef MethodName, StringRef VariableName,
2169
0
                             StringRef State, SourceLocation Loc) override {
2170
2171
0
    PartialDiagnosticAt Warning(Loc, S.PDiag(diag::warn_use_in_invalid_state) <<
2172
0
                                MethodName << VariableName << State);
2173
2174
0
    Warnings.emplace_back(std::move(Warning), OptionalNotes());
2175
0
  }
2176
};
2177
} // anonymous namespace
2178
} // namespace consumed
2179
} // namespace clang
2180
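ConsumedWarningsHandler reports violations found by the consumed-objects analysis (-Wconsumed), which tracks a declared typestate per object. A minimal sketch of an annotated class and a use in an invalid state; the attribute spellings below follow Clang's consumed-annotation attributes and, like the diagnostic wording in the comment, should be treated as an assumption (illustrative example):

class __attribute__((consumable(unconsumed))) File {
public:
  File() __attribute__((return_typestate(unconsumed)));
  void close() __attribute__((callable_when("unconsumed"),
                              set_typestate(consumed)));
  int read() __attribute__((callable_when("unconsumed")));
};

int demo() {
  File f;
  f.close();
  return f.read();    // invalid invocation of 'read' while 'f' is in the
                      // 'consumed' state
}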
2181
//===----------------------------------------------------------------------===//
2182
// Unsafe buffer usage analysis.
2183
//===----------------------------------------------------------------------===//
2184
2185
namespace {
2186
class UnsafeBufferUsageReporter : public UnsafeBufferUsageHandler {
2187
  Sema &S;
2188
  bool SuggestSuggestions;  // Recommend -fsafe-buffer-usage-suggestions?
2189
2190
  // Lists, as a string, the names of the variables in `VarGroupForVD` except
2191
  // for `VD` itself:
2192
  std::string listVariableGroupAsString(
2193
0
      const VarDecl *VD, const ArrayRef<const VarDecl *> &VarGroupForVD) const {
2194
0
    if (VarGroupForVD.size() <= 1)
2195
0
      return "";
2196
2197
0
    std::vector<StringRef> VarNames;
2198
0
    auto PutInQuotes = [](StringRef S) -> std::string {
2199
0
      return "'" + S.str() + "'";
2200
0
    };
2201
2202
0
    for (auto *V : VarGroupForVD) {
2203
0
      if (V == VD)
2204
0
        continue;
2205
0
      VarNames.push_back(V->getName());
2206
0
    }
2207
0
    if (VarNames.size() == 1) {
2208
0
      return PutInQuotes(VarNames[0]);
2209
0
    }
2210
0
    if (VarNames.size() == 2) {
2211
0
      return PutInQuotes(VarNames[0]) + " and " + PutInQuotes(VarNames[1]);
2212
0
    }
2213
0
    assert(VarGroupForVD.size() > 3);
2214
0
    const unsigned N = VarNames.size() -
2215
0
                       2; // need to print the last two names as "..., X, and Y"
2216
0
    std::string AllVars = "";
2217
2218
0
    for (unsigned I = 0; I < N; ++I)
2219
0
      AllVars.append(PutInQuotes(VarNames[I]) + ", ");
2220
0
    AllVars.append(PutInQuotes(VarNames[N]) + ", and " +
2221
0
                   PutInQuotes(VarNames[N + 1]));
2222
0
    return AllVars;
2223
0
  }
2224
2225
public:
2226
  UnsafeBufferUsageReporter(Sema &S, bool SuggestSuggestions)
2227
0
    : S(S), SuggestSuggestions(SuggestSuggestions) {}
2228
2229
  void handleUnsafeOperation(const Stmt *Operation, bool IsRelatedToDecl,
2230
0
                             ASTContext &Ctx) override {
2231
0
    SourceLocation Loc;
2232
0
    SourceRange Range;
2233
0
    unsigned MsgParam = 0;
2234
0
    if (const auto *ASE = dyn_cast<ArraySubscriptExpr>(Operation)) {
2235
0
      Loc = ASE->getBase()->getExprLoc();
2236
0
      Range = ASE->getBase()->getSourceRange();
2237
0
      MsgParam = 2;
2238
0
    } else if (const auto *BO = dyn_cast<BinaryOperator>(Operation)) {
2239
0
      BinaryOperator::Opcode Op = BO->getOpcode();
2240
0
      if (Op == BO_Add || Op == BO_AddAssign || Op == BO_Sub ||
2241
0
          Op == BO_SubAssign) {
2242
0
        if (BO->getRHS()->getType()->isIntegerType()) {
2243
0
          Loc = BO->getLHS()->getExprLoc();
2244
0
          Range = BO->getLHS()->getSourceRange();
2245
0
        } else {
2246
0
          Loc = BO->getRHS()->getExprLoc();
2247
0
          Range = BO->getRHS()->getSourceRange();
2248
0
        }
2249
0
        MsgParam = 1;
2250
0
      }
2251
0
    } else if (const auto *UO = dyn_cast<UnaryOperator>(Operation)) {
2252
0
      UnaryOperator::Opcode Op = UO->getOpcode();
2253
0
      if (Op == UO_PreInc || Op == UO_PreDec || Op == UO_PostInc ||
2254
0
          Op == UO_PostDec) {
2255
0
        Loc = UO->getSubExpr()->getExprLoc();
2256
0
        Range = UO->getSubExpr()->getSourceRange();
2257
0
        MsgParam = 1;
2258
0
      }
2259
0
    } else {
2260
0
      if (isa<CallExpr>(Operation)) {
2261
        // note_unsafe_buffer_operation doesn't have this mode yet.
2262
0
        assert(!IsRelatedToDecl && "Not implemented yet!");
2263
0
        MsgParam = 3;
2264
0
      } else if (const auto *ECE = dyn_cast<ExplicitCastExpr>(Operation)) {
2265
0
        QualType destType = ECE->getType();
2266
0
        const uint64_t dSize =
2267
0
            Ctx.getTypeSize(destType.getTypePtr()->getPointeeType());
2268
0
        if (const auto *CE = dyn_cast<CXXMemberCallExpr>(ECE->getSubExpr())) {
2269
0
          QualType srcType = CE->getType();
2270
0
          const uint64_t sSize =
2271
0
              Ctx.getTypeSize(srcType.getTypePtr()->getPointeeType());
2272
0
          if (sSize >= dSize)
2273
0
            return;
2274
0
        }
2275
0
        MsgParam = 4;
2276
0
      }
2277
0
      Loc = Operation->getBeginLoc();
2278
0
      Range = Operation->getSourceRange();
2279
0
    }
2280
0
    if (IsRelatedToDecl) {
2281
0
      assert(!SuggestSuggestions &&
2282
0
             "Variables blamed for unsafe buffer usage without suggestions!");
2283
0
      S.Diag(Loc, diag::note_unsafe_buffer_operation) << MsgParam << Range;
2284
0
    } else {
2285
0
      S.Diag(Loc, diag::warn_unsafe_buffer_operation) << MsgParam << Range;
2286
0
      if (SuggestSuggestions) {
2287
0
        S.Diag(Loc, diag::note_safe_buffer_usage_suggestions_disabled);
2288
0
      }
2289
0
    }
2290
0
  }
2291
2292
  void handleUnsafeVariableGroup(const VarDecl *Variable,
2293
                                 const VariableGroupsManager &VarGrpMgr,
2294
0
                                 FixItList &&Fixes, const Decl *D) override {
2295
0
    assert(!SuggestSuggestions &&
2296
0
           "Unsafe buffer usage fixits displayed without suggestions!");
2297
0
    S.Diag(Variable->getLocation(), diag::warn_unsafe_buffer_variable)
2298
0
        << Variable << (Variable->getType()->isPointerType() ? 0 : 1)
2299
0
        << Variable->getSourceRange();
2300
0
    if (!Fixes.empty()) {
2301
0
      assert(isa<NamedDecl>(D) &&
2302
0
             "Fix-its are generated only for `NamedDecl`s");
2303
0
      const NamedDecl *ND = cast<NamedDecl>(D);
2304
0
      bool BriefMsg = false;
2305
      // If the variable group involves parameters, the diagnostic message will
2306
      // NOT explain how the variables are grouped as the reason is non-trivial
2307
      // and irrelevant to users' experience:
2308
0
      const auto VarGroupForVD = VarGrpMgr.getGroupOfVar(Variable, &BriefMsg);
2309
0
      unsigned FixItStrategy = 0; // For now we only have 'std::span' strategy
2310
0
      const auto &FD =
2311
0
          S.Diag(Variable->getLocation(),
2312
0
                 BriefMsg ? diag::note_unsafe_buffer_variable_fixit_together
2313
0
                          : diag::note_unsafe_buffer_variable_fixit_group);
2314
2315
0
      FD << Variable << FixItStrategy;
2316
0
      FD << listVariableGroupAsString(Variable, VarGroupForVD)
2317
0
         << (VarGroupForVD.size() > 1) << ND;
2318
0
      for (const auto &F : Fixes) {
2319
0
        FD << F;
2320
0
      }
2321
0
    }
2322
2323
0
#ifndef NDEBUG
2324
0
    if (areDebugNotesRequested())
2325
0
      for (const DebugNote &Note: DebugNotesByVar[Variable])
2326
0
        S.Diag(Note.first, diag::note_safe_buffer_debug_mode) << Note.second;
2327
0
#endif
2328
0
  }
2329
2330
0
  bool isSafeBufferOptOut(const SourceLocation &Loc) const override {
2331
0
    return S.PP.isSafeBufferOptOut(S.getSourceManager(), Loc);
2332
0
  }
2333
2334
  // Returns the text representation of clang::unsafe_buffer_usage attribute.
2335
  // `WSSuffix` holds customized whitespace, e.g., newline or space
2336
  // characters.
2337
  std::string
2338
  getUnsafeBufferUsageAttributeTextAt(SourceLocation Loc,
2339
0
                                      StringRef WSSuffix = "") const override {
2340
0
    Preprocessor &PP = S.getPreprocessor();
2341
0
    TokenValue ClangUnsafeBufferUsageTokens[] = {
2342
0
        tok::l_square,
2343
0
        tok::l_square,
2344
0
        PP.getIdentifierInfo("clang"),
2345
0
        tok::coloncolon,
2346
0
        PP.getIdentifierInfo("unsafe_buffer_usage"),
2347
0
        tok::r_square,
2348
0
        tok::r_square};
2349
2350
0
    StringRef MacroName;
2351
2352
    // The returned macro is guaranteed not to be function-like:
2353
0
    MacroName = PP.getLastMacroWithSpelling(Loc, ClangUnsafeBufferUsageTokens);
2354
0
    if (MacroName.empty())
2355
0
      MacroName = "[[clang::unsafe_buffer_usage]]";
2356
0
    return MacroName.str() + WSSuffix.str();
2357
0
  }
2358
};
2359
} // namespace
2360
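UnsafeBufferUsageReporter emits the -Wunsafe-buffer-usage diagnostics for raw-pointer subscripts and arithmetic, and computes the spelling of the opt-out attribute used in fix-its. A minimal sketch of code it flags together with the opt-out mechanisms referenced above, assuming -Wunsafe-buffer-usage is enabled (illustrative example):

int sum(int *p, unsigned n) {
  int total = 0;
  for (unsigned i = 0; i < n; ++i)
    total += p[i];                      // unsafe buffer access through 'p'
  return total;
}

[[clang::unsafe_buffer_usage]]          // calls to this function are flagged
int legacy_sum(int *p, unsigned n);     // at the call site instead

#pragma clang unsafe_buffer_usage begin
int opted_out(int *p) { return p[4]; }  // no warning: region is opted out
#pragma clang unsafe_buffer_usage end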
2361
//===----------------------------------------------------------------------===//
2362
// AnalysisBasedWarnings - Worker object used by Sema to execute analysis-based
2363
//  warnings on a function, method, or block.
2364
//===----------------------------------------------------------------------===//
2365
2366
46
sema::AnalysisBasedWarnings::Policy::Policy() {
2367
46
  enableCheckFallThrough = 1;
2368
46
  enableCheckUnreachable = 0;
2369
46
  enableThreadSafetyAnalysis = 0;
2370
46
  enableConsumedAnalysis = 0;
2371
46
}
2372
2373
/// InterProceduralData aims to be a storage of whatever data should be passed
2374
/// between analyses of different functions.
2375
///
2376
/// At the moment, its primary goal is to make the information gathered during
2377
/// the analysis of the blocks available during the analysis of the enclosing
2378
/// function.  This is important due to the fact that blocks are analyzed before
2379
/// the enclosed function is even parsed fully, so it is not viable to access
2380
/// anything in the outer scope while analyzing the block.  On the other hand,
2381
/// re-building CFG for blocks and re-analyzing them when we do have all the
2382
/// information (i.e. during the analysis of the enclosing function) seems to be
2383
/// ill-designed.
2384
class sema::AnalysisBasedWarnings::InterProceduralData {
2385
public:
2386
  // It is important to analyze blocks within functions because it's a very
2387
  // common pattern to capture completion handler parameters by blocks.
2388
  CalledOnceInterProceduralData CalledOnceData;
2389
};
2390
2391
276
static unsigned isEnabled(DiagnosticsEngine &D, unsigned diag) {
2392
276
  return (unsigned)!D.isIgnored(diag, SourceLocation());
2393
276
}
2394
2395
sema::AnalysisBasedWarnings::AnalysisBasedWarnings(Sema &s)
2396
    : S(s), IPData(std::make_unique<InterProceduralData>()),
2397
      NumFunctionsAnalyzed(0), NumFunctionsWithBadCFGs(0), NumCFGBlocks(0),
2398
      MaxCFGBlocksPerFunction(0), NumUninitAnalysisFunctions(0),
2399
      NumUninitAnalysisVariables(0), MaxUninitAnalysisVariablesPerFunction(0),
2400
      NumUninitAnalysisBlockVisits(0),
2401
46
      MaxUninitAnalysisBlockVisitsPerFunction(0) {
2402
2403
46
  using namespace diag;
2404
46
  DiagnosticsEngine &D = S.getDiagnostics();
2405
2406
46
  DefaultPolicy.enableCheckUnreachable =
2407
46
      isEnabled(D, warn_unreachable) || isEnabled(D, warn_unreachable_break) ||
2408
46
      isEnabled(D, warn_unreachable_return) ||
2409
46
      isEnabled(D, warn_unreachable_loop_increment);
2410
2411
46
  DefaultPolicy.enableThreadSafetyAnalysis = isEnabled(D, warn_double_lock);
2412
2413
46
  DefaultPolicy.enableConsumedAnalysis =
2414
46
      isEnabled(D, warn_use_in_invalid_state);
2415
46
}
2416
2417
// We need this here for unique_ptr with a forward-declared class.
2418
46
sema::AnalysisBasedWarnings::~AnalysisBasedWarnings() = default;
2419
2420
1
static void flushDiagnostics(Sema &S, const sema::FunctionScopeInfo *fscope) {
2421
1
  for (const auto &D : fscope->PossiblyUnreachableDiags)
2422
0
    S.Diag(D.Loc, D.PD);
2423
1
}
2424
2425
// An AST Visitor that calls a callback function on each callable DEFINITION
2426
// that is NOT in a dependent context:
2427
class CallableVisitor : public RecursiveASTVisitor<CallableVisitor> {
2428
private:
2429
  llvm::function_ref<void(const Decl *)> Callback;
2430
2431
public:
2432
  CallableVisitor(llvm::function_ref<void(const Decl *)> Callback)
2433
0
      : Callback(Callback) {}
2434
2435
0
  bool VisitFunctionDecl(FunctionDecl *Node) {
2436
0
    if (cast<DeclContext>(Node)->isDependentContext())
2437
0
      return true; // Not to analyze dependent decl
2438
    // `FunctionDecl->hasBody()` returns true if the function has a body
2440
    // defined somewhere.  But we want to know whether this `Node` itself has
2441
    // a body as a child.  So we use `doesThisDeclarationHaveABody`:
2441
0
    if (Node->doesThisDeclarationHaveABody())
2442
0
      Callback(Node);
2443
0
    return true;
2444
0
  }
2445
2446
0
  bool VisitBlockDecl(BlockDecl *Node) {
2447
0
    if (cast<DeclContext>(Node)->isDependentContext())
2448
0
      return true; // Not to analyze dependent decl
2449
0
    Callback(Node);
2450
0
    return true;
2451
0
  }
2452
2453
0
  bool VisitObjCMethodDecl(ObjCMethodDecl *Node) {
2454
0
    if (cast<DeclContext>(Node)->isDependentContext())
2455
0
      return true; // Not to analyze dependent decl
2456
0
    if (Node->hasBody())
2457
0
      Callback(Node);
2458
0
    return true;
2459
0
  }
2460
2461
0
  bool VisitLambdaExpr(LambdaExpr *Node) {
2462
0
    return VisitFunctionDecl(Node->getCallOperator());
2463
0
  }
2464
2465
0
  bool shouldVisitTemplateInstantiations() const { return true; }
2466
0
  bool shouldVisitImplicitCode() const { return false; }
2467
};
2468
2469
void clang::sema::AnalysisBasedWarnings::IssueWarnings(
2470
46
     TranslationUnitDecl *TU) {
2471
46
  if (!TU)
2472
0
    return; // This is unexpected, give up quietly.
2473
2474
46
  DiagnosticsEngine &Diags = S.getDiagnostics();
2475
2476
46
  if (S.hasUncompilableErrorOccurred() || Diags.getIgnoreAllWarnings())
2477
    // Exit if there are uncompilable errors or all warnings are ignored:
2478
46
    return;
2479
2480
0
  DiagnosticOptions &DiagOpts = Diags.getDiagnosticOptions();
2481
2482
  // UnsafeBufferUsage analysis settings.
2483
0
  bool UnsafeBufferUsageCanEmitSuggestions = S.getLangOpts().CPlusPlus20;
2484
0
  bool UnsafeBufferUsageShouldEmitSuggestions =  // Should != Can.
2485
0
      UnsafeBufferUsageCanEmitSuggestions &&
2486
0
      DiagOpts.ShowSafeBufferUsageSuggestions;
2487
0
  bool UnsafeBufferUsageShouldSuggestSuggestions =
2488
0
      UnsafeBufferUsageCanEmitSuggestions &&
2489
0
      !DiagOpts.ShowSafeBufferUsageSuggestions;
2490
0
  UnsafeBufferUsageReporter R(S, UnsafeBufferUsageShouldSuggestSuggestions);
2491
2492
  // The Callback function that performs analyses:
2493
0
  auto CallAnalyzers = [&](const Decl *Node) -> void {
2494
    // Perform unsafe buffer usage analysis:
2495
0
    if (!Diags.isIgnored(diag::warn_unsafe_buffer_operation,
2496
0
                         Node->getBeginLoc()) ||
2497
0
        !Diags.isIgnored(diag::warn_unsafe_buffer_variable,
2498
0
                         Node->getBeginLoc())) {
2499
0
      clang::checkUnsafeBufferUsage(Node, R,
2500
0
                                    UnsafeBufferUsageShouldEmitSuggestions);
2501
0
    }
2502
2503
    // More analysis ...
2504
0
  };
2505
  // Emit per-function analysis-based warnings that require the whole-TU
2506
  // reasoning. Check if any of them is enabled at all before scanning the AST:
2507
0
  if (!Diags.isIgnored(diag::warn_unsafe_buffer_operation, SourceLocation()) ||
2508
0
      !Diags.isIgnored(diag::warn_unsafe_buffer_variable, SourceLocation())) {
2509
0
    CallableVisitor(CallAnalyzers).TraverseTranslationUnitDecl(TU);
2510
0
  }
2511
0
}
2512
2513
void clang::sema::AnalysisBasedWarnings::IssueWarnings(
2514
    sema::AnalysisBasedWarnings::Policy P, sema::FunctionScopeInfo *fscope,
2515
1
    const Decl *D, QualType BlockType) {
2516
2517
  // We avoid doing analysis-based warnings when there are errors for
2518
  // two reasons:
2519
  // (1) The CFGs often can't be constructed (if the body is invalid), so
2520
  //     don't bother trying.
2521
  // (2) The code already has problems; running the analysis just takes more
2522
  //     time.
2523
1
  DiagnosticsEngine &Diags = S.getDiagnostics();
2524
2525
  // Do not do any analysis if we are going to just ignore them.
2526
1
  if (Diags.getIgnoreAllWarnings() ||
2527
1
      (Diags.getSuppressSystemWarnings() &&
2528
1
       S.SourceMgr.isInSystemHeader(D->getLocation())))
2529
0
    return;
2530
2531
  // For code in dependent contexts, we'll do this at instantiation time.
2532
1
  if (cast<DeclContext>(D)->isDependentContext())
2533
0
    return;
2534
2535
1
  if (S.hasUncompilableErrorOccurred()) {
2536
    // Flush out any possibly unreachable diagnostics.
2537
1
    flushDiagnostics(S, fscope);
2538
1
    return;
2539
1
  }
2540
2541
0
  const Stmt *Body = D->getBody();
2542
0
  assert(Body);
2543
2544
  // Construct the analysis context with the specified CFG build options.
2545
0
  AnalysisDeclContext AC(/* AnalysisDeclContextManager */ nullptr, D);
2546
2547
  // Don't generate EH edges for CallExprs as we'd like to avoid the n^2
2548
  // explosion for destructors that can result, and the compile-time hit.
2549
0
  AC.getCFGBuildOptions().PruneTriviallyFalseEdges = true;
2550
0
  AC.getCFGBuildOptions().AddEHEdges = false;
2551
0
  AC.getCFGBuildOptions().AddInitializers = true;
2552
0
  AC.getCFGBuildOptions().AddImplicitDtors = true;
2553
0
  AC.getCFGBuildOptions().AddTemporaryDtors = true;
2554
0
  AC.getCFGBuildOptions().AddCXXNewAllocator = false;
2555
0
  AC.getCFGBuildOptions().AddCXXDefaultInitExprInCtors = true;
2556
2557
  // Force that certain expressions appear as CFGElements in the CFG.  This
2558
  // is used to speed up various analyses.
2559
  // FIXME: This isn't the right factoring.  This is here for initial
2560
  // prototyping, but we need a way for analyses to say what expressions they
2561
  // expect to always be CFGElements and then fill in the BuildOptions
2562
  // appropriately.  This is essentially a layering violation.
2563
0
  if (P.enableCheckUnreachable || P.enableThreadSafetyAnalysis ||
2564
0
      P.enableConsumedAnalysis) {
2565
    // Unreachable code analysis and thread safety require a linearized CFG.
2566
0
    AC.getCFGBuildOptions().setAllAlwaysAdd();
2567
0
  }
2568
0
  else {
2569
0
    AC.getCFGBuildOptions()
2570
0
      .setAlwaysAdd(Stmt::BinaryOperatorClass)
2571
0
      .setAlwaysAdd(Stmt::CompoundAssignOperatorClass)
2572
0
      .setAlwaysAdd(Stmt::BlockExprClass)
2573
0
      .setAlwaysAdd(Stmt::CStyleCastExprClass)
2574
0
      .setAlwaysAdd(Stmt::DeclRefExprClass)
2575
0
      .setAlwaysAdd(Stmt::ImplicitCastExprClass)
2576
0
      .setAlwaysAdd(Stmt::UnaryOperatorClass);
2577
0
  }
2578
2579
  // Install the logical handler.
2580
0
  std::optional<LogicalErrorHandler> LEH;
2581
0
  if (LogicalErrorHandler::hasActiveDiagnostics(Diags, D->getBeginLoc())) {
2582
0
    LEH.emplace(S);
2583
0
    AC.getCFGBuildOptions().Observer = &*LEH;
2584
0
  }
2585
2586
  // Emit delayed diagnostics.
2587
0
  if (!fscope->PossiblyUnreachableDiags.empty()) {
2588
0
    bool analyzed = false;
2589
2590
    // Register the expressions with the CFGBuilder.
2591
0
    for (const auto &D : fscope->PossiblyUnreachableDiags) {
2592
0
      for (const Stmt *S : D.Stmts)
2593
0
        AC.registerForcedBlockExpression(S);
2594
0
    }
2595
2596
0
    if (AC.getCFG()) {
2597
0
      analyzed = true;
2598
0
      for (const auto &D : fscope->PossiblyUnreachableDiags) {
2599
0
        bool AllReachable = true;
2600
0
        for (const Stmt *S : D.Stmts) {
2601
0
          const CFGBlock *block = AC.getBlockForRegisteredExpression(S);
2602
0
          CFGReverseBlockReachabilityAnalysis *cra =
2603
0
              AC.getCFGReachablityAnalysis();
2604
          // FIXME: We should be able to assert that block is non-null, but
2605
          // the CFG analysis can skip potentially-evaluated expressions in
2606
          // edge cases; see test/Sema/vla-2.c.
2607
0
          if (block && cra) {
2608
            // Can this block be reached from the entrance?
2609
0
            if (!cra->isReachable(&AC.getCFG()->getEntry(), block)) {
2610
0
              AllReachable = false;
2611
0
              break;
2612
0
            }
2613
0
          }
2614
          // If we cannot map to a basic block, assume the statement is
2615
          // reachable.
2616
0
        }
2617
2618
0
        if (AllReachable)
2619
0
          S.Diag(D.Loc, D.PD);
2620
0
      }
2621
0
    }
2622
2623
0
    if (!analyzed)
2624
0
      flushDiagnostics(S, fscope);
2625
0
  }
2626
2627
  // Warning: check missing 'return'
2628
0
  if (P.enableCheckFallThrough) {
2629
0
    const CheckFallThroughDiagnostics &CD =
2630
0
        (isa<BlockDecl>(D)
2631
0
             ? CheckFallThroughDiagnostics::MakeForBlock()
2632
0
             : (isa<CXXMethodDecl>(D) &&
2633
0
                cast<CXXMethodDecl>(D)->getOverloadedOperator() == OO_Call &&
2634
0
                cast<CXXMethodDecl>(D)->getParent()->isLambda())
2635
0
                   ? CheckFallThroughDiagnostics::MakeForLambda()
2636
0
                   : (fscope->isCoroutine()
2637
0
                          ? CheckFallThroughDiagnostics::MakeForCoroutine(D)
2638
0
                          : CheckFallThroughDiagnostics::MakeForFunction(D)));
2639
0
    CheckFallThroughForBody(S, D, Body, BlockType, CD, AC, fscope);
2640
0
  }
2641
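CheckFallThroughForBody, invoked just above, warns when control can reach the end of a body that must produce a value, with dedicated wording for functions, blocks, lambdas, and coroutines. A minimal sketch of the plain-function case, diagnosed under -Wreturn-type (illustrative example):

int pick(int x) {
  if (x > 0)
    return x;
}                     // non-void function does not return a value in all
                      // control paths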
2642
  // Warning: check for unreachable code
2643
0
  if (P.enableCheckUnreachable) {
2644
    // Only check for unreachable code on non-template instantiations.
2645
    // Different template instantiations can effectively change the control-flow
2646
    // and it is very difficult to prove that a snippet of code in a template
2647
    // is unreachable for all instantiations.
2648
0
    bool isTemplateInstantiation = false;
2649
0
    if (const FunctionDecl *Function = dyn_cast<FunctionDecl>(D))
2650
0
      isTemplateInstantiation = Function->isTemplateInstantiation();
2651
0
    if (!isTemplateInstantiation)
2652
0
      CheckUnreachable(S, AC);
2653
0
  }
2654
2655
  // Check for thread safety violations
2656
0
  if (P.enableThreadSafetyAnalysis) {
2657
0
    SourceLocation FL = AC.getDecl()->getLocation();
2658
0
    SourceLocation FEL = AC.getDecl()->getEndLoc();
2659
0
    threadSafety::ThreadSafetyReporter Reporter(S, FL, FEL);
2660
0
    if (!Diags.isIgnored(diag::warn_thread_safety_beta, D->getBeginLoc()))
2661
0
      Reporter.setIssueBetaWarnings(true);
2662
0
    if (!Diags.isIgnored(diag::warn_thread_safety_verbose, D->getBeginLoc()))
2663
0
      Reporter.setVerbose(true);
2664
2665
0
    threadSafety::runThreadSafetyAnalysis(AC, Reporter,
2666
0
                                          &S.ThreadSafetyDeclCache);
2667
0
    Reporter.emitDiagnostics();
2668
0
  }
2669
2670
  // Check for violations of consumed properties.
2671
0
  if (P.enableConsumedAnalysis) {
2672
0
    consumed::ConsumedWarningsHandler WarningHandler(S);
2673
0
    consumed::ConsumedAnalyzer Analyzer(WarningHandler);
2674
0
    Analyzer.run(AC);
2675
0
  }
2676
2677
0
  if (!Diags.isIgnored(diag::warn_uninit_var, D->getBeginLoc()) ||
2678
0
      !Diags.isIgnored(diag::warn_sometimes_uninit_var, D->getBeginLoc()) ||
2679
0
      !Diags.isIgnored(diag::warn_maybe_uninit_var, D->getBeginLoc()) ||
2680
0
      !Diags.isIgnored(diag::warn_uninit_const_reference, D->getBeginLoc())) {
2681
0
    if (CFG *cfg = AC.getCFG()) {
2682
0
      UninitValsDiagReporter reporter(S);
2683
0
      UninitVariablesAnalysisStats stats;
2684
0
      std::memset(&stats, 0, sizeof(UninitVariablesAnalysisStats));
2685
0
      runUninitializedVariablesAnalysis(*cast<DeclContext>(D), *cfg, AC,
2686
0
                                        reporter, stats);
2687
2688
0
      if (S.CollectStats && stats.NumVariablesAnalyzed > 0) {
2689
0
        ++NumUninitAnalysisFunctions;
2690
0
        NumUninitAnalysisVariables += stats.NumVariablesAnalyzed;
2691
0
        NumUninitAnalysisBlockVisits += stats.NumBlockVisits;
2692
0
        MaxUninitAnalysisVariablesPerFunction =
2693
0
            std::max(MaxUninitAnalysisVariablesPerFunction,
2694
0
                     stats.NumVariablesAnalyzed);
2695
0
        MaxUninitAnalysisBlockVisitsPerFunction =
2696
0
            std::max(MaxUninitAnalysisBlockVisitsPerFunction,
2697
0
                     stats.NumBlockVisits);
2698
0
      }
2699
0
    }
2700
0
  }
2701
2702
  // Check for violations of "called once" parameter properties.
2703
0
  if (S.getLangOpts().ObjC && !S.getLangOpts().CPlusPlus &&
2704
0
      shouldAnalyzeCalledOnceParameters(Diags, D->getBeginLoc())) {
2705
0
    if (AC.getCFG()) {
2706
0
      CalledOnceCheckReporter Reporter(S, IPData->CalledOnceData);
2707
0
      checkCalledOnceParameters(
2708
0
          AC, Reporter,
2709
0
          shouldAnalyzeCalledOnceConventions(Diags, D->getBeginLoc()));
2710
0
    }
2711
0
  }
2712
2713
0
  bool FallThroughDiagFull =
2714
0
      !Diags.isIgnored(diag::warn_unannotated_fallthrough, D->getBeginLoc());
2715
0
  bool FallThroughDiagPerFunction = !Diags.isIgnored(
2716
0
      diag::warn_unannotated_fallthrough_per_function, D->getBeginLoc());
2717
0
  if (FallThroughDiagFull || FallThroughDiagPerFunction ||
2718
0
      fscope->HasFallthroughStmt) {
2719
0
    DiagnoseSwitchLabelsFallthrough(S, AC, !FallThroughDiagFull);
2720
0
  }
2721
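Editor's note: a hypothetical snippet (not part of the file) of the unannotated fall-through that DiagnoseSwitchLabelsFallthrough flags under -Wimplicit-fallthrough; inserting [[fallthrough]]; before the next label silences it:

    int classify(int c) {
      switch (c) {
      case 0:
        ++c; // warning: unannotated fall-through between switch labels
      case 1:
        return c;
      default:
        return -1;
      }
    }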
2722
0
  if (S.getLangOpts().ObjCWeak &&
2723
0
      !Diags.isIgnored(diag::warn_arc_repeated_use_of_weak, D->getBeginLoc()))
2724
0
    diagnoseRepeatedUseOfWeak(S, fscope, D, AC.getParentMap());
2725
2726
2727
  // Check for infinite self-recursion in functions
2728
0
  if (!Diags.isIgnored(diag::warn_infinite_recursive_function,
2729
0
                       D->getBeginLoc())) {
2730
0
    if (const FunctionDecl *FD = dyn_cast<FunctionDecl>(D)) {
2731
0
      checkRecursiveFunction(S, FD, Body, AC);
2732
0
    }
2733
0
  }
2734
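Editor's note: a hypothetical example (not part of the file) of the self-recursion pattern that checkRecursiveFunction reports under -Winfinite-recursion:

    int countdown(int n) {
      return countdown(n - 1); // warning: all paths through this function
                               // will call itself
    }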
2735
  // Check for throw out of non-throwing function.
2736
0
  if (!Diags.isIgnored(diag::warn_throw_in_noexcept_func, D->getBeginLoc()))
2737
0
    if (const FunctionDecl *FD = dyn_cast<FunctionDecl>(D))
2738
0
      if (S.getLangOpts().CPlusPlus && !fscope->isCoroutine() && isNoexcept(FD))
2739
0
        checkThrowInNonThrowingFunc(S, FD, AC);
2740
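Editor's note: a hypothetical example (not part of the file) of a throw escaping a non-throwing function, which checkThrowInNonThrowingFunc reports under -Wexceptions:

    #include <stdexcept>

    // warning: 'mustNotThrow' has a non-throwing exception specification but
    // can still throw, resulting in a call to std::terminate
    void mustNotThrow(int x) noexcept {
      if (x < 0)
        throw std::invalid_argument("negative");
    }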
2741
  // If none of the previous checks caused a CFG build, trigger one here
2742
  // for the logical error handler.
2743
0
  if (LogicalErrorHandler::hasActiveDiagnostics(Diags, D->getBeginLoc())) {
2744
0
    AC.getCFG();
2745
0
  }
2746
2747
  // Collect statistics about the CFG if it was built.
2748
0
  if (S.CollectStats && AC.isCFGBuilt()) {
2749
0
    ++NumFunctionsAnalyzed;
2750
0
    if (CFG *cfg = AC.getCFG()) {
2751
      // If we successfully built a CFG for this context, record some more
2752
      // detail information about it.
2753
0
      NumCFGBlocks += cfg->getNumBlockIDs();
2754
0
      MaxCFGBlocksPerFunction = std::max(MaxCFGBlocksPerFunction,
2755
0
                                         cfg->getNumBlockIDs());
2756
0
    } else {
2757
0
      ++NumFunctionsWithBadCFGs;
2758
0
    }
2759
0
  }
2760
0
}
2761
2762
0
void clang::sema::AnalysisBasedWarnings::PrintStats() const {
2763
0
  llvm::errs() << "\n*** Analysis Based Warnings Stats:\n";
2764
2765
0
  unsigned NumCFGsBuilt = NumFunctionsAnalyzed - NumFunctionsWithBadCFGs;
2766
0
  unsigned AvgCFGBlocksPerFunction =
2767
0
      !NumCFGsBuilt ? 0 : NumCFGBlocks/NumCFGsBuilt;
2768
0
  llvm::errs() << NumFunctionsAnalyzed << " functions analyzed ("
2769
0
               << NumFunctionsWithBadCFGs << " w/o CFGs).\n"
2770
0
               << "  " << NumCFGBlocks << " CFG blocks built.\n"
2771
0
               << "  " << AvgCFGBlocksPerFunction
2772
0
               << " average CFG blocks per function.\n"
2773
0
               << "  " << MaxCFGBlocksPerFunction
2774
0
               << " max CFG blocks per function.\n";
2775
2776
0
  unsigned AvgUninitVariablesPerFunction = !NumUninitAnalysisFunctions ? 0
2777
0
      : NumUninitAnalysisVariables/NumUninitAnalysisFunctions;
2778
0
  unsigned AvgUninitBlockVisitsPerFunction = !NumUninitAnalysisFunctions ? 0
2779
0
      : NumUninitAnalysisBlockVisits/NumUninitAnalysisFunctions;
2780
0
  llvm::errs() << NumUninitAnalysisFunctions
2781
0
               << " functions analyzed for uninitialiazed variables\n"
2782
0
               << "  " << NumUninitAnalysisVariables << " variables analyzed.\n"
2783
0
               << "  " << AvgUninitVariablesPerFunction
2784
0
               << " average variables per function.\n"
2785
0
               << "  " << MaxUninitAnalysisVariablesPerFunction
2786
0
               << " max variables per function.\n"
2787
0
               << "  " << NumUninitAnalysisBlockVisits << " block visits.\n"
2788
0
               << "  " << AvgUninitBlockVisitsPerFunction
2789
0
               << " average block visits per function.\n"
2790
0
               << "  " << MaxUninitAnalysisBlockVisitsPerFunction
2791
0
               << " max block visits per function.\n";
2792
0
}