1//=== AnalysisBasedWarnings.cpp - Sema warnings based on libAnalysis ------===//
2//
3// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
4// See https://llvm.org/LICENSE.txt for license information.
5// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
6//
7//===----------------------------------------------------------------------===//
8//
9// This file defines analysis_warnings::[Policy,Executor].
10// Together they are used by Sema to issue warnings based on inexpensive
11// static analysis algorithms in libAnalysis.
12//
13//===----------------------------------------------------------------------===//
14
15#include "clang/Sema/AnalysisBasedWarnings.h"
16#include "SemaLifetimeSafety.h"
17#include "TypeLocBuilder.h"
18#include "clang/AST/Decl.h"
19#include "clang/AST/DeclCXX.h"
20#include "clang/AST/DeclObjC.h"
21#include "clang/AST/DynamicRecursiveASTVisitor.h"
22#include "clang/AST/EvaluatedExprVisitor.h"
23#include "clang/AST/Expr.h"
24#include "clang/AST/ExprCXX.h"
25#include "clang/AST/ExprObjC.h"
26#include "clang/AST/OperationKinds.h"
27#include "clang/AST/ParentMap.h"
28#include "clang/AST/StmtCXX.h"
29#include "clang/AST/StmtObjC.h"
30#include "clang/AST/StmtVisitor.h"
31#include "clang/AST/Type.h"
32#include "clang/Analysis/Analyses/CFGReachabilityAnalysis.h"
33#include "clang/Analysis/Analyses/CalledOnceCheck.h"
34#include "clang/Analysis/Analyses/Consumed.h"
35#include "clang/Analysis/Analyses/LifetimeSafety/LifetimeSafety.h"
36#include "clang/Analysis/Analyses/ReachableCode.h"
37#include "clang/Analysis/Analyses/ThreadSafety.h"
38#include "clang/Analysis/Analyses/UninitializedValues.h"
39#include "clang/Analysis/Analyses/UnsafeBufferUsage.h"
40#include "clang/Analysis/AnalysisDeclContext.h"
41#include "clang/Analysis/CFG.h"
42#include "clang/Analysis/CallGraph.h"
43#include "clang/Analysis/FlowSensitive/DataflowWorklist.h"
44#include "clang/Basic/Diagnostic.h"
45#include "clang/Basic/DiagnosticSema.h"
46#include "clang/Basic/SourceLocation.h"
47#include "clang/Basic/SourceManager.h"
48#include "clang/Lex/Preprocessor.h"
49#include "clang/Sema/ScopeInfo.h"
50#include "clang/Sema/SemaInternal.h"
51#include "llvm/ADT/ArrayRef.h"
52#include "llvm/ADT/BitVector.h"
53#include "llvm/ADT/DenseMap.h"
54#include "llvm/ADT/MapVector.h"
55#include "llvm/ADT/PostOrderIterator.h"
56#include "llvm/ADT/STLFunctionalExtras.h"
57#include "llvm/ADT/SmallVector.h"
58#include "llvm/ADT/StringExtras.h"
59#include "llvm/ADT/StringRef.h"
60#include "llvm/Support/Debug.h"
61#include "llvm/Support/TimeProfiler.h"
62#include <algorithm>
63#include <deque>
64#include <iterator>
65#include <optional>
66
67using namespace clang;
68
69//===----------------------------------------------------------------------===//
70// Unreachable code analysis.
71//===----------------------------------------------------------------------===//
72
namespace {
  /// Receives one callback per unreachable region from the reachable-code
  /// analysis and turns it into a -Wunreachable-code family diagnostic,
  /// together with a fix-it note showing how to silence the warning.
  class UnreachableCodeHandler : public reachable_code::Callback {
    Sema &S;
    /// Range of the last "silenceable" condition value we diagnosed; used to
    /// collapse multiple reports that stem from the same condition.
    SourceRange PreviousSilenceableCondVal;

  public:
    UnreachableCodeHandler(Sema &s) : S(s) {}

    void HandleUnreachable(reachable_code::UnreachableKind UK, SourceLocation L,
                           SourceRange SilenceableCondVal, SourceRange R1,
                           SourceRange R2, bool HasFallThroughAttr) override {
      // If the diagnosed code is `[[fallthrough]];` and
      // `-Wunreachable-code-fallthrough` is enabled, suppress `code will never
      // be executed` warning to avoid generating diagnostic twice
      if (HasFallThroughAttr &&
          !S.getDiagnostics().isIgnored(DiagID: diag::warn_unreachable_fallthrough_attr,
                                        Loc: SourceLocation()))
        return;

      // Avoid reporting multiple unreachable code diagnostics that are
      // triggered by the same conditional value.
      if (PreviousSilenceableCondVal.isValid() &&
          SilenceableCondVal.isValid() &&
          PreviousSilenceableCondVal == SilenceableCondVal)
        return;
      PreviousSilenceableCondVal = SilenceableCondVal;

      // Choose a more specific diagnostic when the unreachable statement is a
      // break, a return, or a loop increment.
      unsigned diag = diag::warn_unreachable;
      switch (UK) {
      case reachable_code::UK_Break:
        diag = diag::warn_unreachable_break;
        break;
      case reachable_code::UK_Return:
        diag = diag::warn_unreachable_return;
        break;
      case reachable_code::UK_Loop_Increment:
        diag = diag::warn_unreachable_loop_increment;
        break;
      case reachable_code::UK_Other:
        break;
      }

      S.Diag(Loc: L, DiagID: diag) << R1 << R2;

      // If the analysis identified a condition value responsible for the
      // unreachability, suggest wrapping it in marker parentheses as a way to
      // silence the warning.
      SourceLocation Open = SilenceableCondVal.getBegin();
      if (Open.isValid()) {
        SourceLocation Close = SilenceableCondVal.getEnd();
        Close = S.getLocForEndOfToken(Loc: Close);
        if (Close.isValid()) {
          S.Diag(Loc: Open, DiagID: diag::note_unreachable_silence)
            << FixItHint::CreateInsertion(InsertionLoc: Open, Code: "/* DISABLES CODE */ (")
            << FixItHint::CreateInsertion(InsertionLoc: Close, Code: ")");
        }
      }
    }
  };
} // anonymous namespace
130
131/// CheckUnreachable - Check for unreachable code.
132static void CheckUnreachable(Sema &S, AnalysisDeclContext &AC) {
133 // As a heuristic prune all diagnostics not in the main file. Currently
134 // the majority of warnings in headers are false positives. These
135 // are largely caused by configuration state, e.g. preprocessor
136 // defined code, etc.
137 //
138 // Note that this is also a performance optimization. Analyzing
139 // headers many times can be expensive.
140 if (!S.getSourceManager().isInMainFile(Loc: AC.getDecl()->getBeginLoc()))
141 return;
142
143 UnreachableCodeHandler UC(S);
144 reachable_code::FindUnreachableCode(AC, PP&: S.getPreprocessor(), CB&: UC);
145}
146
namespace {
/// Warn on logical operator errors in CFGBuilder
class LogicalErrorHandler : public CFGCallback {
  Sema &S;

public:
  LogicalErrorHandler(Sema &S) : S(S) {}

  /// Returns true if \p E or any of its sub-expressions comes from a macro
  /// expansion. Used to suppress the warnings below inside macros, where the
  /// constant-result condition is often intentional.
  static bool HasMacroID(const Expr *E) {
    if (E->getExprLoc().isMacroID())
      return true;

    // Recurse to children.
    for (const Stmt *SubStmt : E->children())
      if (const Expr *SubExpr = dyn_cast_or_null<Expr>(Val: SubStmt))
        if (HasMacroID(E: SubExpr))
          return true;

    return false;
  }

  /// Diagnose a logical and/or of a negation with a comparison that is
  /// always true (||) or always false (&&).
  void logicAlwaysTrue(const BinaryOperator *B, bool isAlwaysTrue) override {
    if (HasMacroID(E: B))
      return;

    unsigned DiagID = isAlwaysTrue
                          ? diag::warn_tautological_negation_or_compare
                          : diag::warn_tautological_negation_and_compare;
    SourceRange DiagRange = B->getSourceRange();
    S.Diag(Loc: B->getExprLoc(), DiagID) << DiagRange;
  }

  /// Diagnose a comparison whose operand ranges overlap such that its result
  /// is a constant (always true or always false).
  void compareAlwaysTrue(const BinaryOperator *B,
                         bool isAlwaysTrueOrFalse) override {
    if (HasMacroID(E: B))
      return;

    SourceRange DiagRange = B->getSourceRange();
    S.Diag(Loc: B->getExprLoc(), DiagID: diag::warn_tautological_overlap_comparison)
        << DiagRange << isAlwaysTrueOrFalse;
  }

  /// Diagnose an equality test against a bitwise expression whose result is
  /// known in advance.
  void compareBitwiseEquality(const BinaryOperator *B,
                              bool isAlwaysTrue) override {
    if (HasMacroID(E: B))
      return;

    SourceRange DiagRange = B->getSourceRange();
    S.Diag(Loc: B->getExprLoc(), DiagID: diag::warn_comparison_bitwise_always)
        << DiagRange << isAlwaysTrue;
  }

  /// Diagnose a suspicious comparison involving a bitwise-or expression.
  void compareBitwiseOr(const BinaryOperator *B) override {
    if (HasMacroID(E: B))
      return;

    SourceRange DiagRange = B->getSourceRange();
    S.Diag(Loc: B->getExprLoc(), DiagID: diag::warn_comparison_bitwise_or) << DiagRange;
  }

  /// Returns true if any diagnostic this callback can emit is enabled at
  /// \p Loc, so callers can skip the extra CFG work when all are disabled.
  static bool hasActiveDiagnostics(DiagnosticsEngine &Diags,
                                   SourceLocation Loc) {
    return !Diags.isIgnored(DiagID: diag::warn_tautological_overlap_comparison, Loc) ||
           !Diags.isIgnored(DiagID: diag::warn_comparison_bitwise_or, Loc) ||
           !Diags.isIgnored(DiagID: diag::warn_tautological_negation_and_compare, Loc);
  }
};
} // anonymous namespace
215
216//===----------------------------------------------------------------------===//
217// Check for infinite self-recursion in functions
218//===----------------------------------------------------------------------===//
219
// Returns true if the function is called anywhere within the CFGBlock.
// For member functions, the additional condition of being called through the
// 'this' pointer is required.
static bool hasRecursiveCallInPath(const FunctionDecl *FD, CFGBlock &Block) {
  // Process all the Stmt's in this block to find any calls to FD.
  for (const auto &B : Block) {
    if (B.getKind() != CFGElement::Statement)
      continue;

    // Only calls whose callee canonically is FD are of interest.
    const CallExpr *CE = dyn_cast<CallExpr>(Val: B.getAs<CFGStmt>()->getStmt());
    if (!CE || !CE->getCalleeDecl() ||
        CE->getCalleeDecl()->getCanonicalDecl() != FD)
      continue;

    // Skip function calls which are qualified with a templated class.
    if (const DeclRefExpr *DRE =
            dyn_cast<DeclRefExpr>(Val: CE->getCallee()->IgnoreParenImpCasts()))
      if (NestedNameSpecifier NNS = DRE->getQualifier();
          NNS.getKind() == NestedNameSpecifier::Kind::Type)
        if (isa_and_nonnull<TemplateSpecializationType>(Val: NNS.getAsType()))
          continue;

    // Count the call as recursive unless it is a virtual member call on an
    // object other than 'this' — such a call may dispatch to an override
    // rather than recursing into FD.
    const CXXMemberCallExpr *MCE = dyn_cast<CXXMemberCallExpr>(Val: CE);
    if (!MCE || isa<CXXThisExpr>(Val: MCE->getImplicitObjectArgument()) ||
        !MCE->getMethodDecl()->isVirtual())
      return true;
  }
  return false;
}
249
250// Returns true if every path from the entry block passes through a call to FD.
251static bool checkForRecursiveFunctionCall(const FunctionDecl *FD, CFG *cfg) {
252 llvm::SmallPtrSet<CFGBlock *, 16> Visited;
253 llvm::SmallVector<CFGBlock *, 16> WorkList;
254 // Keep track of whether we found at least one recursive path.
255 bool foundRecursion = false;
256
257 const unsigned ExitID = cfg->getExit().getBlockID();
258
259 // Seed the work list with the entry block.
260 WorkList.push_back(Elt: &cfg->getEntry());
261
262 while (!WorkList.empty()) {
263 CFGBlock *Block = WorkList.pop_back_val();
264
265 for (auto I = Block->succ_begin(), E = Block->succ_end(); I != E; ++I) {
266 if (CFGBlock *SuccBlock = *I) {
267 if (!Visited.insert(Ptr: SuccBlock).second)
268 continue;
269
270 // Found a path to the exit node without a recursive call.
271 if (ExitID == SuccBlock->getBlockID())
272 return false;
273
274 // If the successor block contains a recursive call, end analysis there.
275 if (hasRecursiveCallInPath(FD, Block&: *SuccBlock)) {
276 foundRecursion = true;
277 continue;
278 }
279
280 WorkList.push_back(Elt: SuccBlock);
281 }
282 }
283 }
284 return foundRecursion;
285}
286
287static void checkRecursiveFunction(Sema &S, const FunctionDecl *FD,
288 const Stmt *Body, AnalysisDeclContext &AC) {
289 FD = FD->getCanonicalDecl();
290
291 // Only run on non-templated functions and non-templated members of
292 // templated classes.
293 if (FD->getTemplatedKind() != FunctionDecl::TK_NonTemplate &&
294 FD->getTemplatedKind() != FunctionDecl::TK_MemberSpecialization)
295 return;
296
297 CFG *cfg = AC.getCFG();
298 if (!cfg) return;
299
300 // If the exit block is unreachable, skip processing the function.
301 if (cfg->getExit().pred_empty())
302 return;
303
304 // Emit diagnostic if a recursive function call is detected for all paths.
305 if (checkForRecursiveFunctionCall(FD, cfg))
306 S.Diag(Loc: Body->getBeginLoc(), DiagID: diag::warn_infinite_recursive_function);
307}
308
309//===----------------------------------------------------------------------===//
310// Check for throw in a non-throwing function.
311//===----------------------------------------------------------------------===//
312
/// Determine whether an exception thrown by E, unwinding from ThrowBlock,
/// can reach ExitBlock.
static bool throwEscapes(Sema &S, const CXXThrowExpr *E, CFGBlock &ThrowBlock,
                         CFG *Body) {
  // Depth-first traversal of the unwind paths leading out of ThrowBlock.
  SmallVector<CFGBlock *, 16> Stack;
  llvm::BitVector Queued(Body->getNumBlockIDs());

  Stack.push_back(Elt: &ThrowBlock);
  Queued[ThrowBlock.getBlockID()] = true;

  while (!Stack.empty()) {
    CFGBlock &UnwindBlock = *Stack.pop_back_val();

    for (auto &Succ : UnwindBlock.succs()) {
      if (!Succ.isReachable() || Queued[Succ->getBlockID()])
        continue;

      // Reaching the exit block means the exception escapes the function.
      if (Succ->getBlockID() == Body->getExit().getBlockID())
        return true;

      if (auto *Catch =
              dyn_cast_or_null<CXXCatchStmt>(Val: Succ->getLabel())) {
        QualType Caught = Catch->getCaughtType();
        if (Caught.isNull() || // catch (...) catches everything
            !E->getSubExpr() || // throw; is considered caught by any handler
            S.handlerCanCatch(HandlerType: Caught, ExceptionType: E->getSubExpr()->getType()))
          // Exception doesn't escape via this path.
          break;
      } else {
        // Not a handler; keep unwinding through this successor.
        Stack.push_back(Elt: Succ);
        Queued[Succ->getBlockID()] = true;
      }
    }
  }

  return false;
}
350
351static void visitReachableThrows(
352 CFG *BodyCFG,
353 llvm::function_ref<void(const CXXThrowExpr *, CFGBlock &)> Visit) {
354 llvm::BitVector Reachable(BodyCFG->getNumBlockIDs());
355 clang::reachable_code::ScanReachableFromBlock(Start: &BodyCFG->getEntry(), Reachable);
356 for (CFGBlock *B : *BodyCFG) {
357 if (!Reachable[B->getBlockID()])
358 continue;
359 for (CFGElement &E : *B) {
360 std::optional<CFGStmt> S = E.getAs<CFGStmt>();
361 if (!S)
362 continue;
363 if (auto *Throw = dyn_cast<CXXThrowExpr>(Val: S->getStmt()))
364 Visit(Throw, *B);
365 }
366 }
367}
368
/// Emit the warning (plus an explanatory note) for a throw expression that
/// can escape from a function declared not to throw.
static void EmitDiagForCXXThrowInNonThrowingFunc(Sema &S, SourceLocation OpLoc,
                                                 const FunctionDecl *FD) {
  // Skip throws in system headers, and require type source info so the notes
  // below can refer to the exception specification's written location.
  if (!S.getSourceManager().isInSystemHeader(Loc: OpLoc) &&
      FD->getTypeSourceInfo()) {
    S.Diag(Loc: OpLoc, DiagID: diag::warn_throw_in_noexcept_func) << FD;
    // In C++11 and later, destructors and (array) delete operators get a
    // dedicated note; other functions get the generic one pointing at the
    // exception specification.
    if (S.getLangOpts().CPlusPlus11 &&
        (isa<CXXDestructorDecl>(Val: FD) ||
         FD->getDeclName().getCXXOverloadedOperator() == OO_Delete ||
         FD->getDeclName().getCXXOverloadedOperator() == OO_Array_Delete)) {
      if (const auto *Ty = FD->getTypeSourceInfo()->getType()->
                                         getAs<FunctionProtoType>())
        S.Diag(Loc: FD->getLocation(), DiagID: diag::note_throw_in_dtor)
            << !isa<CXXDestructorDecl>(Val: FD) << !Ty->hasExceptionSpec()
            << FD->getExceptionSpecSourceRange();
    } else
      S.Diag(Loc: FD->getLocation(), DiagID: diag::note_throw_in_function)
          << FD->getExceptionSpecSourceRange();
  }
}
388
389static void checkThrowInNonThrowingFunc(Sema &S, const FunctionDecl *FD,
390 AnalysisDeclContext &AC) {
391 CFG *BodyCFG = AC.getCFG();
392 if (!BodyCFG)
393 return;
394 if (BodyCFG->getExit().pred_empty())
395 return;
396 visitReachableThrows(BodyCFG, Visit: [&](const CXXThrowExpr *Throw, CFGBlock &Block) {
397 if (throwEscapes(S, E: Throw, ThrowBlock&: Block, Body: BodyCFG))
398 EmitDiagForCXXThrowInNonThrowingFunc(S, OpLoc: Throw->getThrowLoc(), FD);
399 });
400}
401
402static bool isNoexcept(const FunctionDecl *FD) {
403 const auto *FPT = FD->getType()->castAs<FunctionProtoType>();
404 if (FPT->isNothrow() || FD->hasAttr<NoThrowAttr>())
405 return true;
406 return false;
407}
408
409/// Checks if the given expression is a reference to a function with
410/// 'noreturn' attribute.
411static bool isReferenceToNoReturn(const Expr *E) {
412 if (auto *DRef = dyn_cast<DeclRefExpr>(Val: E->IgnoreParenCasts()))
413 if (auto *FD = dyn_cast<FunctionDecl>(Val: DRef->getDecl()))
414 return FD->isNoReturn();
415 return false;
416}
417
418/// Checks if the given variable, which is assumed to be a function pointer, is
419/// initialized with a function having 'noreturn' attribute.
420static bool isInitializedWithNoReturn(const VarDecl *VD) {
421 if (const Expr *Init = VD->getInit()) {
422 if (auto *ListInit = dyn_cast<InitListExpr>(Val: Init);
423 ListInit && ListInit->getNumInits() > 0)
424 Init = ListInit->getInit(Init: 0);
425 return isReferenceToNoReturn(E: Init);
426 }
427 return false;
428}
429
430namespace {
431
432/// Looks for statements, that can define value of the given variable.
433struct TransferFunctions : public StmtVisitor<TransferFunctions> {
434 const VarDecl *Var;
435 std::optional<bool> AllValuesAreNoReturn;
436
437 TransferFunctions(const VarDecl *VD) : Var(VD) {}
438
439 void reset() { AllValuesAreNoReturn = std::nullopt; }
440
441 void VisitDeclStmt(DeclStmt *DS) {
442 for (auto *DI : DS->decls())
443 if (auto *VD = dyn_cast<VarDecl>(Val: DI))
444 if (VarDecl *Def = VD->getDefinition())
445 if (Def == Var)
446 AllValuesAreNoReturn = isInitializedWithNoReturn(VD: Def);
447 }
448
449 void VisitUnaryOperator(UnaryOperator *UO) {
450 if (UO->getOpcode() == UO_AddrOf) {
451 if (auto *DRef =
452 dyn_cast<DeclRefExpr>(Val: UO->getSubExpr()->IgnoreParenCasts()))
453 if (DRef->getDecl() == Var)
454 AllValuesAreNoReturn = false;
455 }
456 }
457
458 void VisitBinaryOperator(BinaryOperator *BO) {
459 if (BO->getOpcode() == BO_Assign)
460 if (auto *DRef = dyn_cast<DeclRefExpr>(Val: BO->getLHS()->IgnoreParenCasts()))
461 if (DRef->getDecl() == Var)
462 AllValuesAreNoReturn = isReferenceToNoReturn(E: BO->getRHS());
463 }
464
465 void VisitCallExpr(CallExpr *CE) {
466 for (CallExpr::arg_iterator I = CE->arg_begin(), E = CE->arg_end(); I != E;
467 ++I) {
468 const Expr *Arg = *I;
469 if (Arg->isGLValue() && !Arg->getType().isConstQualified())
470 if (auto *DRef = dyn_cast<DeclRefExpr>(Val: Arg->IgnoreParenCasts()))
471 if (auto VD = dyn_cast<VarDecl>(Val: DRef->getDecl()))
472 if (VD->getDefinition() == Var)
473 AllValuesAreNoReturn = false;
474 }
475 }
476};
477} // namespace
478
// Checks if all possible values of the given variable are functions with
// 'noreturn' attribute.
static bool areAllValuesNoReturn(const VarDecl *VD, const CFGBlock &VarBlk,
                                 AnalysisDeclContext &AC) {
  // The set of possible values of a constant variable is determined by
  // its initializer, unless it is a function parameter.
  if (!isa<ParmVarDecl>(Val: VD) && VD->getType().isConstant(Ctx: AC.getASTContext())) {
    if (const VarDecl *Def = VD->getDefinition())
      return isInitializedWithNoReturn(VD: Def);
    return false;
  }

  // In multithreaded environment the value of a global variable may be changed
  // asynchronously.
  if (!VD->getDeclContext()->isFunctionOrMethod())
    return false;

  // Check the condition "all values are noreturn". It is satisfied if the
  // variable is set to "noreturn" value in the current block or all its
  // predecessors satisfy the condition.
  //
  // BlocksToCheck maps each pending block to its status: nullopt while the
  // block has not yet been shown to define a noreturn value, true once it
  // has. The check succeeds when every pending block is true.
  using MapTy = llvm::DenseMap<const CFGBlock *, std::optional<bool>>;
  using ValueTy = MapTy::value_type;
  MapTy BlocksToCheck;
  BlocksToCheck[&VarBlk] = std::nullopt;
  const auto BlockSatisfiesCondition = [](ValueTy Item) {
    return Item.getSecond().value_or(u: false);
  };

  // Walk backwards from the block containing the use of the variable.
  TransferFunctions TF(VD);
  BackwardDataflowWorklist Worklist(*AC.getCFG(), AC);
  llvm::DenseSet<const CFGBlock *> Visited;
  Worklist.enqueueBlock(Block: &VarBlk);
  while (const CFGBlock *B = Worklist.dequeue()) {
    if (Visited.contains(V: B))
      continue;
    Visited.insert(V: B);
    // First check the current block: scan its statements in reverse so the
    // last assignment to the variable wins.
    for (CFGBlock::const_reverse_iterator ri = B->rbegin(), re = B->rend();
         ri != re; ++ri) {
      if (std::optional<CFGStmt> cs = ri->getAs<CFGStmt>()) {
        const Stmt *S = cs->getStmt();
        TF.reset();
        TF.Visit(S: const_cast<Stmt *>(S));
        if (TF.AllValuesAreNoReturn) {
          // A non-noreturn value on any path refutes the property outright.
          if (!TF.AllValuesAreNoReturn.value())
            return false;
          BlocksToCheck[B] = true;
          break;
        }
      }
    }

    // If all checked blocks satisfy the condition, the check is finished.
    if (llvm::all_of(Range&: BlocksToCheck, P: BlockSatisfiesCondition))
      return true;

    // If this block does not contain the variable definition, check
    // its predecessors.
    if (!BlocksToCheck[B]) {
      Worklist.enqueuePredecessors(Block: B);
      BlocksToCheck.erase(Val: B);
      for (const auto &PredBlk : B->preds())
        if (!BlocksToCheck.contains(Val: PredBlk))
          BlocksToCheck[PredBlk] = std::nullopt;
    }
  }

  return false;
}
548
549//===----------------------------------------------------------------------===//
550// Check for missing return value.
551//===----------------------------------------------------------------------===//
552
/// Result of the fall-through analysis in CheckFallThrough (see its doc
/// comment for the precise contract of each value).
enum ControlFlowKind {
  UnknownFallThrough,      ///< No CFG was available; nothing can be concluded.
  NeverFallThrough,        ///< Never falls off the end, but may return.
  MaybeFallThrough,        ///< Might or might not fall off the end.
  AlwaysFallThrough,       ///< Always falls off the end of the statement.
  NeverFallThroughOrReturn ///< Neither falls off the end nor returns.
};
560
/// CheckFallThrough - Check that we don't fall off the end of a
/// Statement that should return a value.
///
/// \returns AlwaysFallThrough iff we always fall off the end of the statement,
/// MaybeFallThrough iff we might or might not fall off the end,
/// NeverFallThroughOrReturn iff we never fall off the end of the statement or
/// return. We assume NeverFallThrough iff we never fall off the end of the
/// statement but we may return. We assume that functions not marked noreturn
/// will return.
static ControlFlowKind CheckFallThrough(AnalysisDeclContext &AC) {
  CFG *cfg = AC.getCFG();
  if (!cfg) return UnknownFallThrough;

  // The CFG leaves in dead things, and we don't want the dead code paths to
  // confuse us, so we mark all live things first.
  llvm::BitVector live(cfg->getNumBlockIDs());
  unsigned count =
      reachable_code::ScanReachableFromBlock(Start: &cfg->getEntry(), Reachable&: live);

  bool AddEHEdges = AC.getAddEHEdges();
  if (!AddEHEdges && count != cfg->getNumBlockIDs())
    // When there are things remaining dead, and we didn't add EH edges
    // from CallExprs to the catch clauses, we have to go back and
    // mark them as live.
    for (const auto *B : *cfg) {
      if (!live[B->getBlockID()]) {
        if (B->preds().empty()) {
          const Stmt *Term = B->getTerminatorStmt();
          if (isa_and_nonnull<CXXTryStmt>(Val: Term))
            // When not adding EH edges from calls, catch clauses
            // can otherwise seem dead. Avoid noting them as dead.
            count += reachable_code::ScanReachableFromBlock(Start: B, Reachable&: live);
          continue;
        }
      }
    }

  // Now we know what is live, we check the live predecessors of the exit block
  // and look for fall through paths, being careful to ignore normal returns,
  // and exceptional paths.
  bool HasLiveReturn = false;
  bool HasFakeEdge = false;
  bool HasPlainEdge = false;
  bool HasAbnormalEdge = false;

  // Ignore default cases that aren't likely to be reachable because all
  // enums in a switch(X) have explicit case statements.
  CFGBlock::FilterOptions FO;
  FO.IgnoreDefaultsWithCoveredEnums = 1;

  for (CFGBlock::filtered_pred_iterator I =
           cfg->getExit().filtered_pred_start_end(f: FO);
       I.hasMore(); ++I) {
    const CFGBlock &B = **I;
    if (!live[B.getBlockID()])
      continue;

    // Skip blocks which contain an element marked as no-return. They don't
    // represent actually viable edges into the exit block, so mark them as
    // abnormal.
    if (B.hasNoReturnElement()) {
      HasAbnormalEdge = true;
      continue;
    }

    // Destructors can appear after the 'return' in the CFG. This is
    // normal. We need to look past the destructors for the return
    // statement (if it exists).
    CFGBlock::const_reverse_iterator ri = B.rbegin(), re = B.rend();

    for ( ; ri != re ; ++ri)
      if (ri->getAs<CFGStmt>())
        break;

    // No more CFGElements in the block?
    if (ri == re) {
      const Stmt *Term = B.getTerminatorStmt();
      if (Term && (isa<CXXTryStmt>(Val: Term) || isa<ObjCAtTryStmt>(Val: Term))) {
        HasAbnormalEdge = true;
        continue;
      }
      // A labeled empty statement, or the entry block...
      HasPlainEdge = true;
      continue;
    }

    // Classify the last statement before the exit edge.
    CFGStmt CS = ri->castAs<CFGStmt>();
    const Stmt *S = CS.getStmt();
    if (isa<ReturnStmt>(Val: S) || isa<CoreturnStmt>(Val: S)) {
      HasLiveReturn = true;
      continue;
    }
    if (isa<ObjCAtThrowStmt>(Val: S)) {
      HasFakeEdge = true;
      continue;
    }
    if (isa<CXXThrowExpr>(Val: S)) {
      HasFakeEdge = true;
      continue;
    }
    if (isa<MSAsmStmt>(Val: S)) {
      // TODO: Verify this is correct.
      HasFakeEdge = true;
      HasLiveReturn = true;
      continue;
    }
    if (isa<CXXTryStmt>(Val: S)) {
      HasAbnormalEdge = true;
      continue;
    }
    if (!llvm::is_contained(Range: B.succs(), Element: &cfg->getExit())) {
      HasAbnormalEdge = true;
      continue;
    }
    // A call through a function pointer whose every possible value is a
    // 'noreturn' function is treated like a no-return element.
    if (auto *Call = dyn_cast<CallExpr>(Val: S)) {
      const Expr *Callee = Call->getCallee();
      if (Callee->getType()->isPointerType())
        if (auto *DeclRef =
                dyn_cast<DeclRefExpr>(Val: Callee->IgnoreParenImpCasts()))
          if (auto *VD = dyn_cast<VarDecl>(Val: DeclRef->getDecl()))
            if (areAllValuesNoReturn(VD, VarBlk: B, AC)) {
              HasAbnormalEdge = true;
              continue;
            }
    }

    HasPlainEdge = true;
  }
  // Combine the per-edge evidence into the final classification.
  if (!HasPlainEdge) {
    if (HasLiveReturn)
      return NeverFallThrough;
    return NeverFallThroughOrReturn;
  }
  if (HasAbnormalEdge || HasFakeEdge || HasLiveReturn)
    return MaybeFallThrough;
  // This says AlwaysFallThrough for calls to functions that are not marked
  // noreturn, that don't return. If people would like this warning to be more
  // accurate, such functions should be marked as noreturn.
  return AlwaysFallThrough;
}
701
namespace {

/// Bundle of diagnostic IDs (0 = diagnostic disabled) and context used when
/// reporting fall-off-the-end problems. Built via one of the MakeFor*
/// factories depending on the kind of entity being checked.
struct CheckFallThroughDiagnostics {
  unsigned diag_FallThrough_HasNoReturn = 0;
  unsigned diag_FallThrough_ReturnsNonVoid = 0;
  unsigned diag_NeverFallThroughOrReturn = 0;
  unsigned FunKind; // TODO: use diag::FalloffFunctionKind
  SourceLocation FuncLoc;

  static CheckFallThroughDiagnostics MakeForFunction(Sema &S,
                                                     const Decl *Func) {
    CheckFallThroughDiagnostics D;
    D.FuncLoc = Func->getLocation();
    D.diag_FallThrough_HasNoReturn = diag::warn_noreturn_has_return_expr;
    D.diag_FallThrough_ReturnsNonVoid = diag::warn_falloff_nonvoid;

    // Don't suggest that virtual functions be marked "noreturn", since they
    // might be overridden by non-noreturn functions.
    bool isVirtualMethod = false;
    if (const CXXMethodDecl *Method = dyn_cast<CXXMethodDecl>(Val: Func))
      isVirtualMethod = Method->isVirtual();

    // Don't suggest that template instantiations be marked "noreturn"
    bool isTemplateInstantiation = false;
    if (const FunctionDecl *Function = dyn_cast<FunctionDecl>(Val: Func)) {
      isTemplateInstantiation = Function->isTemplateInstantiation();
      // In C89 mode, falling off the end of main() is only an extension
      // diagnostic.
      if (!S.getLangOpts().CPlusPlus && !S.getLangOpts().C99 &&
          Function->isMain()) {
        D.diag_FallThrough_ReturnsNonVoid = diag::ext_main_no_return;
      }
    }

    if (!isVirtualMethod && !isTemplateInstantiation)
      D.diag_NeverFallThroughOrReturn = diag::warn_suggest_noreturn_function;

    D.FunKind = diag::FalloffFunctionKind::Function;
    return D;
  }

  static CheckFallThroughDiagnostics MakeForCoroutine(const Decl *Func) {
    CheckFallThroughDiagnostics D;
    D.FuncLoc = Func->getLocation();
    D.diag_FallThrough_ReturnsNonVoid = diag::warn_falloff_nonvoid;
    D.FunKind = diag::FalloffFunctionKind::Coroutine;
    return D;
  }

  static CheckFallThroughDiagnostics MakeForBlock() {
    CheckFallThroughDiagnostics D;
    D.diag_FallThrough_HasNoReturn = diag::err_noreturn_has_return_expr;
    D.diag_FallThrough_ReturnsNonVoid = diag::err_falloff_nonvoid;
    D.FunKind = diag::FalloffFunctionKind::Block;
    return D;
  }

  static CheckFallThroughDiagnostics MakeForLambda() {
    CheckFallThroughDiagnostics D;
    D.diag_FallThrough_HasNoReturn = diag::err_noreturn_has_return_expr;
    D.diag_FallThrough_ReturnsNonVoid = diag::warn_falloff_nonvoid;
    D.FunKind = diag::FalloffFunctionKind::Lambda;
    return D;
  }

  /// Returns true when every diagnostic that could be emitted for this
  /// entity is disabled or inapplicable, letting the caller skip the CFG
  /// analysis entirely.
  bool checkDiagnostics(DiagnosticsEngine &D, bool ReturnsVoid,
                        bool HasNoReturn) const {
    if (FunKind == diag::FalloffFunctionKind::Function) {
      return (ReturnsVoid ||
              D.isIgnored(DiagID: diag::warn_falloff_nonvoid, Loc: FuncLoc)) &&
             (!HasNoReturn ||
              D.isIgnored(DiagID: diag::warn_noreturn_has_return_expr, Loc: FuncLoc)) &&
             (!ReturnsVoid ||
              D.isIgnored(DiagID: diag::warn_suggest_noreturn_block, Loc: FuncLoc));
    }
    if (FunKind == diag::FalloffFunctionKind::Coroutine) {
      return (ReturnsVoid ||
              D.isIgnored(DiagID: diag::warn_falloff_nonvoid, Loc: FuncLoc)) &&
             (!HasNoReturn);
    }
    // For blocks / lambdas.
    return ReturnsVoid && !HasNoReturn;
  }
};

} // anonymous namespace
786
/// CheckFallThroughForBody - Check that we don't fall off the end of a
/// function that should return a value. Check that we don't fall off the end
/// of a noreturn function. We assume that functions and blocks not marked
/// noreturn will return.
static void CheckFallThroughForBody(Sema &S, const Decl *D, const Stmt *Body,
                                    QualType BlockType,
                                    const CheckFallThroughDiagnostics &CD,
                                    AnalysisDeclContext &AC) {

  bool ReturnsVoid = false;
  bool HasNoReturn = false;

  // Determine the declared return behavior for each kind of entity.
  if (const auto *FD = dyn_cast<FunctionDecl>(Val: D)) {
    // A coroutine with a fallthrough handler behaves like a void function
    // for the purposes of this check.
    if (const auto *CBody = dyn_cast<CoroutineBodyStmt>(Val: Body))
      ReturnsVoid = CBody->getFallthroughHandler() != nullptr;
    else
      ReturnsVoid = FD->getReturnType()->isVoidType();
    HasNoReturn = FD->isNoReturn() || FD->hasAttr<InferredNoReturnAttr>();
  }
  else if (const auto *MD = dyn_cast<ObjCMethodDecl>(Val: D)) {
    ReturnsVoid = MD->getReturnType()->isVoidType();
    HasNoReturn = MD->hasAttr<NoReturnAttr>();
  }
  else if (isa<BlockDecl>(Val: D)) {
    // For blocks, the relevant information lives on the block's type.
    if (const FunctionType *FT =
          BlockType->getPointeeType()->getAs<FunctionType>()) {
      if (FT->getReturnType()->isVoidType())
        ReturnsVoid = true;
      if (FT->getNoReturnAttr())
        HasNoReturn = true;
    }
  }

  DiagnosticsEngine &Diags = S.getDiagnostics();

  // Short circuit for compilation speed.
  if (CD.checkDiagnostics(D&: Diags, ReturnsVoid, HasNoReturn))
    return;
  SourceLocation LBrace = Body->getBeginLoc(), RBrace = Body->getEndLoc();

  // cpu_dispatch functions permit empty function bodies for ICC compatibility.
  if (D->getAsFunction() && D->getAsFunction()->isCPUDispatchMultiVersion())
    return;

  // Either in a function body compound statement, or a function-try-block.
  switch (int FallThroughType = CheckFallThrough(AC)) {
    case UnknownFallThrough:
      break;

    case MaybeFallThrough:
    case AlwaysFallThrough:
      if (HasNoReturn) {
        if (CD.diag_FallThrough_HasNoReturn)
          S.Diag(Loc: RBrace, DiagID: CD.diag_FallThrough_HasNoReturn) << CD.FunKind;
      } else if (!ReturnsVoid && CD.diag_FallThrough_ReturnsNonVoid) {
        // If the final statement is a call to an always-throwing function,
        // don't warn about the fall-through.
        if (D->getAsFunction()) {
          if (const auto *CS = dyn_cast<CompoundStmt>(Val: Body);
              CS && !CS->body_empty()) {
            const Stmt *LastStmt = CS->body_back();
            // Unwrap ExprWithCleanups if necessary.
            if (const auto *EWC = dyn_cast<ExprWithCleanups>(Val: LastStmt)) {
              LastStmt = EWC->getSubExpr();
            }
            if (const auto *CE = dyn_cast<CallExpr>(Val: LastStmt)) {
              if (const FunctionDecl *Callee = CE->getDirectCallee();
                  Callee && Callee->hasAttr<InferredNoReturnAttr>()) {
                return; // Don't warn about fall-through.
              }
            }
            // Direct throw.
            if (isa<CXXThrowExpr>(Val: LastStmt)) {
              return; // Don't warn about fall-through.
            }
          }
        }
        // MaybeFallThrough means only some paths fall off the end.
        bool NotInAllControlPaths = FallThroughType == MaybeFallThrough;
        S.Diag(Loc: RBrace, DiagID: CD.diag_FallThrough_ReturnsNonVoid)
            << CD.FunKind << NotInAllControlPaths;
      }
      break;
    case NeverFallThroughOrReturn:
      // Suggest marking the entity 'noreturn' where that diagnostic applies.
      if (ReturnsVoid && !HasNoReturn && CD.diag_NeverFallThroughOrReturn) {
        if (const FunctionDecl *FD = dyn_cast<FunctionDecl>(Val: D)) {
          S.Diag(Loc: LBrace, DiagID: CD.diag_NeverFallThroughOrReturn) << 0 << FD;
        } else if (const ObjCMethodDecl *MD = dyn_cast<ObjCMethodDecl>(Val: D)) {
          S.Diag(Loc: LBrace, DiagID: CD.diag_NeverFallThroughOrReturn) << 1 << MD;
        } else {
          S.Diag(Loc: LBrace, DiagID: CD.diag_NeverFallThroughOrReturn);
        }
      }
      break;
    case NeverFallThrough:
      break;
  }
}
884
885//===----------------------------------------------------------------------===//
886// -Wuninitialized
887//===----------------------------------------------------------------------===//
888
889namespace {
890/// ContainsReference - A visitor class to search for references to
891/// a particular declaration (the needle) within any evaluated component of an
892/// expression (recursively).
893class ContainsReference : public ConstEvaluatedExprVisitor<ContainsReference> {
894 bool FoundReference;
895 const DeclRefExpr *Needle;
896
897public:
898 typedef ConstEvaluatedExprVisitor<ContainsReference> Inherited;
899
900 ContainsReference(ASTContext &Context, const DeclRefExpr *Needle)
901 : Inherited(Context), FoundReference(false), Needle(Needle) {}
902
903 void VisitExpr(const Expr *E) {
904 // Stop evaluating if we already have a reference.
905 if (FoundReference)
906 return;
907
908 Inherited::VisitExpr(S: E);
909 }
910
911 void VisitDeclRefExpr(const DeclRefExpr *E) {
912 if (E == Needle)
913 FoundReference = true;
914 else
915 Inherited::VisitDeclRefExpr(E);
916 }
917
918 bool doesContainReference() const { return FoundReference; }
919};
920} // anonymous namespace
921
922static bool SuggestInitializationFixit(Sema &S, const VarDecl *VD) {
923 QualType VariableTy = VD->getType().getCanonicalType();
924 if (VariableTy->isBlockPointerType() &&
925 !VD->hasAttr<BlocksAttr>()) {
926 S.Diag(Loc: VD->getLocation(), DiagID: diag::note_block_var_fixit_add_initialization)
927 << VD->getDeclName()
928 << FixItHint::CreateInsertion(InsertionLoc: VD->getLocation(), Code: "__block ");
929 return true;
930 }
931
932 // Don't issue a fixit if there is already an initializer.
933 if (VD->getInit())
934 return false;
935
936 // Don't suggest a fixit inside macros.
937 if (VD->getEndLoc().isMacroID())
938 return false;
939
940 SourceLocation Loc = S.getLocForEndOfToken(Loc: VD->getEndLoc());
941
942 // Suggest possible initialization (if any).
943 std::string Init = S.getFixItZeroInitializerForType(T: VariableTy, Loc);
944 if (Init.empty())
945 return false;
946
947 S.Diag(Loc, DiagID: diag::note_var_fixit_add_initialization) << VD->getDeclName()
948 << FixItHint::CreateInsertion(InsertionLoc: Loc, Code: Init);
949 return true;
950}
951
952/// Create a fixit to remove an if-like statement, on the assumption that its
953/// condition is CondVal.
954static void CreateIfFixit(Sema &S, const Stmt *If, const Stmt *Then,
955 const Stmt *Else, bool CondVal,
956 FixItHint &Fixit1, FixItHint &Fixit2) {
957 if (CondVal) {
958 // If condition is always true, remove all but the 'then'.
959 Fixit1 = FixItHint::CreateRemoval(
960 RemoveRange: CharSourceRange::getCharRange(B: If->getBeginLoc(), E: Then->getBeginLoc()));
961 if (Else) {
962 SourceLocation ElseKwLoc = S.getLocForEndOfToken(Loc: Then->getEndLoc());
963 Fixit2 =
964 FixItHint::CreateRemoval(RemoveRange: SourceRange(ElseKwLoc, Else->getEndLoc()));
965 }
966 } else {
967 // If condition is always false, remove all but the 'else'.
968 if (Else)
969 Fixit1 = FixItHint::CreateRemoval(RemoveRange: CharSourceRange::getCharRange(
970 B: If->getBeginLoc(), E: Else->getBeginLoc()));
971 else
972 Fixit1 = FixItHint::CreateRemoval(RemoveRange: If->getSourceRange());
973 }
974}
975
/// DiagUninitUse -- Helper function to produce a diagnostic for an
/// uninitialized use of a variable.
///
/// Definite uses get a single 'is uninitialized' warning. Sometimes-
/// uninitialized uses additionally get one note per branch that leads to the
/// use with the variable uninitialized, each with a fixit that removes the
/// dead condition; if no branch can be described, a single 'may be used
/// uninitialized' warning is emitted instead.
///
/// \param IsCapturedByBlock selects the block-capture wording of the
///        diagnostics.
static void DiagUninitUse(Sema &S, const VarDecl *VD, const UninitUse &Use,
                          bool IsCapturedByBlock) {
  bool Diagnosed = false;

  switch (Use.getKind()) {
  case UninitUse::Always:
    S.Diag(Loc: Use.getUser()->getBeginLoc(), DiagID: diag::warn_uninit_var)
        << VD->getDeclName() << IsCapturedByBlock
        << Use.getUser()->getSourceRange();
    return;

  case UninitUse::AfterDecl:
  case UninitUse::AfterCall:
    // Report at the variable, with a note at the use site.
    S.Diag(Loc: VD->getLocation(), DiagID: diag::warn_sometimes_uninit_var)
      << VD->getDeclName() << IsCapturedByBlock
      << (Use.getKind() == UninitUse::AfterDecl ? 4 : 5)
      << VD->getLexicalDeclContext() << VD->getSourceRange();
    S.Diag(Loc: Use.getUser()->getBeginLoc(), DiagID: diag::note_uninit_var_use)
        << IsCapturedByBlock << Use.getUser()->getSourceRange();
    return;

  case UninitUse::Maybe:
  case UninitUse::Sometimes:
    // Carry on to report sometimes-uninitialized branches, if possible,
    // or a 'may be used uninitialized' diagnostic otherwise.
    break;
  }

  // Diagnose each branch which leads to a sometimes-uninitialized use.
  for (UninitUse::branch_iterator I = Use.branch_begin(), E = Use.branch_end();
       I != E; ++I) {
    assert(Use.getKind() == UninitUse::Sometimes);

    const Expr *User = Use.getUser();
    const Stmt *Term = I->Terminator;

    // Information used when building the diagnostic.
    unsigned DiagKind;
    StringRef Str;
    SourceRange Range;

    // FixIts to suppress the diagnostic by removing the dead condition.
    // For all binary terminators, branch 0 is taken if the condition is true,
    // and branch 1 is taken if the condition is false.
    int RemoveDiagKind = -1;
    const char *FixitStr =
        S.getLangOpts().CPlusPlus ? (I->Output ? "true" : "false")
                                  : (I->Output ? "1" : "0");
    FixItHint Fixit1, Fixit2;

    // A branch with no terminator statement falls into the default case.
    switch (Term ? Term->getStmtClass() : Stmt::DeclStmtClass) {
    default:
      // Don't know how to report this. Just fall back to 'may be used
      // uninitialized'. FIXME: Can this happen?
      continue;

    // "condition is true / condition is false".
    case Stmt::IfStmtClass: {
      const IfStmt *IS = cast<IfStmt>(Val: Term);
      DiagKind = 0;
      Str = "if";
      Range = IS->getCond()->getSourceRange();
      RemoveDiagKind = 0;
      CreateIfFixit(S, If: IS, Then: IS->getThen(), Else: IS->getElse(),
                    CondVal: I->Output, Fixit1, Fixit2);
      break;
    }
    case Stmt::ConditionalOperatorClass: {
      const ConditionalOperator *CO = cast<ConditionalOperator>(Val: Term);
      DiagKind = 0;
      Str = "?:";
      Range = CO->getCond()->getSourceRange();
      RemoveDiagKind = 0;
      CreateIfFixit(S, If: CO, Then: CO->getTrueExpr(), Else: CO->getFalseExpr(),
                    CondVal: I->Output, Fixit1, Fixit2);
      break;
    }
    case Stmt::BinaryOperatorClass: {
      const BinaryOperator *BO = cast<BinaryOperator>(Val: Term);
      // Only short-circuiting operators create branches.
      if (!BO->isLogicalOp())
        continue;
      DiagKind = 0;
      Str = BO->getOpcodeStr();
      Range = BO->getLHS()->getSourceRange();
      RemoveDiagKind = 0;
      if ((BO->getOpcode() == BO_LAnd && I->Output) ||
          (BO->getOpcode() == BO_LOr && !I->Output))
        // true && y -> y, false || y -> y.
        Fixit1 = FixItHint::CreateRemoval(
            RemoveRange: SourceRange(BO->getBeginLoc(), BO->getOperatorLoc()));
      else
        // false && y -> false, true || y -> true.
        Fixit1 = FixItHint::CreateReplacement(RemoveRange: BO->getSourceRange(), Code: FixitStr);
      break;
    }

    // "loop is entered / loop is exited".
    case Stmt::WhileStmtClass:
      DiagKind = 1;
      Str = "while";
      Range = cast<WhileStmt>(Val: Term)->getCond()->getSourceRange();
      RemoveDiagKind = 1;
      Fixit1 = FixItHint::CreateReplacement(RemoveRange: Range, Code: FixitStr);
      break;
    case Stmt::ForStmtClass:
      DiagKind = 1;
      Str = "for";
      Range = cast<ForStmt>(Val: Term)->getCond()->getSourceRange();
      RemoveDiagKind = 1;
      if (I->Output)
        Fixit1 = FixItHint::CreateRemoval(RemoveRange: Range);
      else
        Fixit1 = FixItHint::CreateReplacement(RemoveRange: Range, Code: FixitStr);
      break;
    case Stmt::CXXForRangeStmtClass:
      if (I->Output == 1) {
        // The use occurs if a range-based for loop's body never executes.
        // That may be impossible, and there's no syntactic fix for this,
        // so treat it as a 'may be uninitialized' case.
        continue;
      }
      DiagKind = 1;
      Str = "for";
      Range = cast<CXXForRangeStmt>(Val: Term)->getRangeInit()->getSourceRange();
      break;

    // "condition is true / loop is exited".
    case Stmt::DoStmtClass:
      DiagKind = 2;
      Str = "do";
      Range = cast<DoStmt>(Val: Term)->getCond()->getSourceRange();
      RemoveDiagKind = 1;
      Fixit1 = FixItHint::CreateReplacement(RemoveRange: Range, Code: FixitStr);
      break;

    // "switch case is taken".
    case Stmt::CaseStmtClass:
      DiagKind = 3;
      Str = "case";
      Range = cast<CaseStmt>(Val: Term)->getLHS()->getSourceRange();
      break;
    case Stmt::DefaultStmtClass:
      DiagKind = 3;
      Str = "default";
      Range = cast<DefaultStmt>(Val: Term)->getDefaultLoc();
      break;
    }

    S.Diag(Loc: Range.getBegin(), DiagID: diag::warn_sometimes_uninit_var)
      << VD->getDeclName() << IsCapturedByBlock << DiagKind
      << Str << I->Output << Range;
    S.Diag(Loc: User->getBeginLoc(), DiagID: diag::note_uninit_var_use)
      << IsCapturedByBlock << User->getSourceRange();
    if (RemoveDiagKind != -1)
      S.Diag(Loc: Fixit1.RemoveRange.getBegin(), DiagID: diag::note_uninit_fixit_remove_cond)
        << RemoveDiagKind << Str << I->Output << Fixit1 << Fixit2;

    Diagnosed = true;
  }

  // No branch could be described: fall back to the weaker diagnostic.
  if (!Diagnosed)
    S.Diag(Loc: Use.getUser()->getBeginLoc(), DiagID: diag::warn_maybe_uninit_var)
        << VD->getDeclName() << IsCapturedByBlock
        << Use.getUser()->getSourceRange();
}
1143
1144/// Diagnose uninitialized const reference usages.
1145static bool DiagnoseUninitializedConstRefUse(Sema &S, const VarDecl *VD,
1146 const UninitUse &Use) {
1147 S.Diag(Loc: Use.getUser()->getBeginLoc(), DiagID: diag::warn_uninit_const_reference)
1148 << VD->getDeclName() << Use.getUser()->getSourceRange();
1149 return !S.getDiagnostics().isLastDiagnosticIgnored();
1150}
1151
1152/// Diagnose uninitialized const pointer usages.
1153static bool DiagnoseUninitializedConstPtrUse(Sema &S, const VarDecl *VD,
1154 const UninitUse &Use) {
1155 S.Diag(Loc: Use.getUser()->getBeginLoc(), DiagID: diag::warn_uninit_const_pointer)
1156 << VD->getDeclName() << Use.getUser()->getSourceRange();
1157 return !S.getDiagnostics().isLastDiagnosticIgnored();
1158}
1159
/// DiagnoseUninitializedUse -- Helper function for diagnosing uses of an
/// uninitialized variable. This manages the different forms of diagnostic
/// emitted for particular types of uses. Returns true if the use was diagnosed
/// as a warning. If a particular use is one we omit warnings for, returns
/// false.
///
/// \param alwaysReportSelfInit when true, the GCC-style 'int x = x;' idiom
///        is reported instead of being treated as a deliberate suppression.
static bool DiagnoseUninitializedUse(Sema &S, const VarDecl *VD,
                                     const UninitUse &Use,
                                     bool alwaysReportSelfInit = false) {
  if (const DeclRefExpr *DRE = dyn_cast<DeclRefExpr>(Val: Use.getUser())) {
    // Inspect the initializer of the variable declaration which is
    // being referenced prior to its initialization. We emit
    // specialized diagnostics for self-initialization, and we
    // specifically avoid warning about self references which take the
    // form of:
    //
    //   int x = x;
    //
    // This is used to indicate to GCC that 'x' is intentionally left
    // uninitialized. Proven code paths which access 'x' in
    // an uninitialized state after this will still warn.
    if (const Expr *Initializer = VD->getInit()) {
      if (!alwaysReportSelfInit && DRE == Initializer->IgnoreParenImpCasts())
        return false;

      // The use appears somewhere inside the initializer (e.g. 'int x = x+1'):
      // report it as a self-reference rather than a plain uninitialized use.
      ContainsReference CR(S.Context, DRE);
      CR.Visit(S: Initializer);
      if (CR.doesContainReference()) {
        S.Diag(Loc: DRE->getBeginLoc(), DiagID: diag::warn_uninit_self_reference_in_init)
            << VD->getDeclName() << VD->getLocation() << DRE->getSourceRange();
        return !S.getDiagnostics().isLastDiagnosticIgnored();
      }
    }

    DiagUninitUse(S, VD, Use, IsCapturedByBlock: false);
  } else {
    // Not a DeclRefExpr: the use is a capture of the variable by a block.
    const BlockExpr *BE = cast<BlockExpr>(Val: Use.getUser());
    if (VD->getType()->isBlockPointerType() && !VD->hasAttr<BlocksAttr>())
      S.Diag(Loc: BE->getBeginLoc(),
             DiagID: diag::warn_uninit_byref_blockvar_captured_by_block)
          << VD->getDeclName()
          << VD->getType().getQualifiers().hasObjCLifetime();
    else
      DiagUninitUse(S, VD, Use, IsCapturedByBlock: true);
  }

  // Report where the variable was declared when the use wasn't within
  // the initializer of that declaration & we didn't already suggest
  // an initialization fixit.
  if (!SuggestInitializationFixit(S, VD))
    S.Diag(Loc: VD->getBeginLoc(), DiagID: diag::note_var_declared_here)
        << VD->getDeclName();

  return !S.getDiagnostics().isLastDiagnosticIgnored();
}
1214
1215namespace {
/// Walks a function body collecting switch statements and [[fallthrough]]
/// annotations, then (with the help of the CFG) classifies each fall-through
/// into a case label as annotated or unannotated. Annotations that are never
/// consumed remain in the set returned by getFallthroughStmts().
class FallthroughMapper : public DynamicRecursiveASTVisitor {
public:
  FallthroughMapper(Sema &S) : FoundSwitchStatements(false), S(S) {
    // Types are analyzed elsewhere; only statements matter here.
    ShouldWalkTypesOfTypeLocs = false;
  }

  bool foundSwitchStatements() const { return FoundSwitchStatements; }

  /// Consume a fallthrough annotation; it must have been recorded by
  /// VisitAttributedStmt during the traversal.
  void markFallthroughVisited(const AttributedStmt *Stmt) {
    bool Found = FallthroughStmts.erase(Ptr: Stmt);
    assert(Found);
    (void)Found;
  }

  typedef llvm::SmallPtrSet<const AttributedStmt *, 8> AttrStmts;

  /// The fallthrough annotations that were never matched to an actual
  /// fall-through edge (i.e. are misplaced).
  const AttrStmts &getFallthroughStmts() const { return FallthroughStmts; }

  /// Compute the set of reachable CFG blocks via a breadth-first search from
  /// the entry block, seeding every case-labelled block as reachable as well.
  void fillReachableBlocks(CFG *Cfg) {
    assert(ReachableBlocks.empty() && "ReachableBlocks already filled");
    std::deque<const CFGBlock *> BlockQueue;

    ReachableBlocks.insert(Ptr: &Cfg->getEntry());
    BlockQueue.push_back(x: &Cfg->getEntry());
    // Mark all case blocks reachable to avoid problems with switching on
    // constants, covered enums, etc.
    // These blocks can contain fall-through annotations, and we don't want to
    // issue a warn_fallthrough_attr_unreachable for them.
    for (const auto *B : *Cfg) {
      const Stmt *L = B->getLabel();
      if (isa_and_nonnull<SwitchCase>(Val: L) && ReachableBlocks.insert(Ptr: B).second)
        BlockQueue.push_back(x: B);
    }

    while (!BlockQueue.empty()) {
      const CFGBlock *P = BlockQueue.front();
      BlockQueue.pop_front();
      for (const CFGBlock *B : P->succs()) {
        if (B && ReachableBlocks.insert(Ptr: B).second)
          BlockQueue.push_back(x: B);
      }
    }
  }

  /// Examine every predecessor path into case-labelled block \p B, counting
  /// annotated fall-throughs in \p AnnotatedCnt. Returns true if at least one
  /// path falls through into \p B without an annotation.
  bool checkFallThroughIntoBlock(const CFGBlock &B, int &AnnotatedCnt,
                                 bool IsTemplateInstantiation) {
    assert(!ReachableBlocks.empty() && "ReachableBlocks empty");

    int UnannotatedCnt = 0;
    AnnotatedCnt = 0;

    std::deque<const CFGBlock *> BlockQueue(B.pred_begin(), B.pred_end());
    while (!BlockQueue.empty()) {
      const CFGBlock *P = BlockQueue.front();
      BlockQueue.pop_front();
      if (!P)
        continue;

      const Stmt *Term = P->getTerminatorStmt();
      if (isa_and_nonnull<SwitchStmt>(Val: Term))
        continue; // Switch statement, good.

      const SwitchCase *SW = dyn_cast_or_null<SwitchCase>(Val: P->getLabel());
      if (SW && SW->getSubStmt() == B.getLabel() && P->begin() == P->end())
        continue; // Previous case label has no statements, good.

      const LabelStmt *L = dyn_cast_or_null<LabelStmt>(Val: P->getLabel());
      if (L && L->getSubStmt() == B.getLabel() && P->begin() == P->end())
        continue; // Case label is preceded with a normal label, good.

      if (!ReachableBlocks.count(Ptr: P)) {
        // Unreachable predecessor: any annotation inside it is itself
        // unreachable and (outside template instantiations) diagnosed.
        for (const CFGElement &Elem : llvm::reverse(C: *P)) {
          if (std::optional<CFGStmt> CS = Elem.getAs<CFGStmt>()) {
            if (const AttributedStmt *AS = asFallThroughAttr(S: CS->getStmt())) {
              // Don't issue a warning for an unreachable fallthrough
              // attribute in template instantiations as it may not be
              // unreachable in all instantiations of the template.
              if (!IsTemplateInstantiation)
                S.Diag(Loc: AS->getBeginLoc(),
                       DiagID: diag::warn_unreachable_fallthrough_attr);
              markFallthroughVisited(Stmt: AS);
              ++AnnotatedCnt;
              break;
            }
            // Don't care about other unreachable statements.
          }
        }
        // If there are no unreachable statements, this may be a special
        // case in CFG:
        // case X: {
        //    A a;  // A has a destructor.
        //    break;
        // }
        // // <<<< This place is represented by a 'hanging' CFG block.
        // case Y:
        continue;
      }

      const Stmt *LastStmt = getLastStmt(B: *P);
      if (const AttributedStmt *AS = asFallThroughAttr(S: LastStmt)) {
        markFallthroughVisited(Stmt: AS);
        ++AnnotatedCnt;
        continue; // Fallthrough annotation, good.
      }

      if (!LastStmt) { // This block contains no executable statements.
        // Traverse its predecessors.
        std::copy(first: P->pred_begin(), last: P->pred_end(),
                  result: std::back_inserter(x&: BlockQueue));
        continue;
      }

      ++UnannotatedCnt;
    }
    return !!UnannotatedCnt;
  }

  // Record every statement carrying a fallthrough attribute; annotations are
  // later erased as they are matched to fall-through edges.
  bool VisitAttributedStmt(AttributedStmt *S) override {
    if (asFallThroughAttr(S))
      FallthroughStmts.insert(Ptr: S);
    return true;
  }

  bool VisitSwitchStmt(SwitchStmt *S) override {
    FoundSwitchStatements = true;
    return true;
  }

  // We don't want to traverse local type declarations. We analyze their
  // methods separately.
  bool TraverseDecl(Decl *D) override { return true; }

  // We analyze lambda bodies separately. Skip them here.
  bool TraverseLambdaExpr(LambdaExpr *LE) override {
    // Traverse the captures, but not the body.
    for (const auto C : zip(t: LE->captures(), u: LE->capture_inits()))
      TraverseLambdaCapture(LE, C: &std::get<0>(t: C), Init: std::get<1>(t: C));
    return true;
  }

private:

  /// Return \p S as an AttributedStmt carrying a FallThroughAttr, or null.
  static const AttributedStmt *asFallThroughAttr(const Stmt *S) {
    if (const AttributedStmt *AS = dyn_cast_or_null<AttributedStmt>(Val: S)) {
      if (hasSpecificAttr<FallThroughAttr>(container: AS->getAttrs()))
        return AS;
    }
    return nullptr;
  }

  /// Return the last executable statement of block \p B (terminator if
  /// present, otherwise the last CFGStmt element), or null if there is none.
  static const Stmt *getLastStmt(const CFGBlock &B) {
    if (const Stmt *Term = B.getTerminatorStmt())
      return Term;
    for (const CFGElement &Elem : llvm::reverse(C: B))
      if (std::optional<CFGStmt> CS = Elem.getAs<CFGStmt>())
        return CS->getStmt();
    // Workaround to detect a statement thrown out by CFGBuilder:
    //   case X: {} case Y:
    //   case X: ; case Y:
    if (const SwitchCase *SW = dyn_cast_or_null<SwitchCase>(Val: B.getLabel()))
      if (!isa<SwitchCase>(Val: SW->getSubStmt()))
        return SW->getSubStmt();

    return nullptr;
  }

  bool FoundSwitchStatements;
  AttrStmts FallthroughStmts;
  Sema &S;
  llvm::SmallPtrSet<const CFGBlock *, 16> ReachableBlocks;
};
1387} // anonymous namespace
1388
1389static StringRef getFallthroughAttrSpelling(Preprocessor &PP,
1390 SourceLocation Loc) {
1391 TokenValue FallthroughTokens[] = {
1392 tok::l_square, tok::l_square,
1393 PP.getIdentifierInfo(Name: "fallthrough"),
1394 tok::r_square, tok::r_square
1395 };
1396
1397 TokenValue ClangFallthroughTokens[] = {
1398 tok::l_square, tok::l_square, PP.getIdentifierInfo(Name: "clang"),
1399 tok::coloncolon, PP.getIdentifierInfo(Name: "fallthrough"),
1400 tok::r_square, tok::r_square
1401 };
1402
1403 bool PreferClangAttr = !PP.getLangOpts().CPlusPlus17 && !PP.getLangOpts().C23;
1404
1405 StringRef MacroName;
1406 if (PreferClangAttr)
1407 MacroName = PP.getLastMacroWithSpelling(Loc, Tokens: ClangFallthroughTokens);
1408 if (MacroName.empty())
1409 MacroName = PP.getLastMacroWithSpelling(Loc, Tokens: FallthroughTokens);
1410 if (MacroName.empty() && !PreferClangAttr)
1411 MacroName = PP.getLastMacroWithSpelling(Loc, Tokens: ClangFallthroughTokens);
1412 if (MacroName.empty()) {
1413 if (!PreferClangAttr)
1414 MacroName = "[[fallthrough]]";
1415 else if (PP.getLangOpts().CPlusPlus)
1416 MacroName = "[[clang::fallthrough]]";
1417 else
1418 MacroName = "__attribute__((fallthrough))";
1419 }
1420 return MacroName;
1421}
1422
/// Emit -Wimplicit-fallthrough diagnostics for the body in \p AC: warn on
/// every unannotated fall-through into a case label (suggesting annotation
/// and break fixits), and report fallthrough annotations that do not precede
/// a case label as invalid.
///
/// \param PerFunction when true, only functions that already use fallthrough
///        annotations are checked (the per-function warning variant).
static void DiagnoseSwitchLabelsFallthrough(Sema &S, AnalysisDeclContext &AC,
                                            bool PerFunction) {
  FallthroughMapper FM(S);
  FM.TraverseStmt(S: AC.getBody());

  // No switches: nothing to check.
  if (!FM.foundSwitchStatements())
    return;

  // In per-function mode, only check functions that opted in by using at
  // least one fallthrough annotation.
  if (PerFunction && FM.getFallthroughStmts().empty())
    return;

  CFG *Cfg = AC.getCFG();

  if (!Cfg)
    return;

  FM.fillReachableBlocks(Cfg);

  for (const CFGBlock *B : llvm::reverse(C&: *Cfg)) {
    const Stmt *Label = B->getLabel();

    // Only case/default-labelled blocks can be fallen into.
    if (!isa_and_nonnull<SwitchCase>(Val: Label))
      continue;

    int AnnotatedCnt;

    bool IsTemplateInstantiation = false;
    if (const FunctionDecl *Function = dyn_cast<FunctionDecl>(Val: AC.getDecl()))
      IsTemplateInstantiation = Function->isTemplateInstantiation();
    if (!FM.checkFallThroughIntoBlock(B: *B, AnnotatedCnt,
                                      IsTemplateInstantiation))
      continue;

    S.Diag(Loc: Label->getBeginLoc(),
           DiagID: PerFunction ? diag::warn_unannotated_fallthrough_per_function
                       : diag::warn_unannotated_fallthrough);

    // Suggest fixits only for fully unannotated labels outside of macros.
    if (!AnnotatedCnt) {
      SourceLocation L = Label->getBeginLoc();
      if (L.isMacroID())
        continue;

      const Stmt *Term = B->getTerminatorStmt();
      // Skip empty cases.
      while (B->empty() && !Term && B->succ_size() == 1) {
        B = *B->succ_begin();
        Term = B->getTerminatorStmt();
      }
      if (!(B->empty() && isa_and_nonnull<BreakStmt>(Val: Term))) {
        Preprocessor &PP = S.getPreprocessor();
        StringRef AnnotationSpelling = getFallthroughAttrSpelling(PP, Loc: L);
        SmallString<64> TextToInsert(AnnotationSpelling);
        TextToInsert += "; ";
        S.Diag(Loc: L, DiagID: diag::note_insert_fallthrough_fixit)
            << AnnotationSpelling
            << FixItHint::CreateInsertion(InsertionLoc: L, Code: TextToInsert);
      }
      S.Diag(Loc: L, DiagID: diag::note_insert_break_fixit)
          << FixItHint::CreateInsertion(InsertionLoc: L, Code: "break; ");
    }
  }

  // Any annotation left unconsumed does not precede a case label: error.
  for (const auto *F : FM.getFallthroughStmts())
    S.Diag(Loc: F->getBeginLoc(), DiagID: diag::err_fallthrough_attr_invalid_placement);
}
1488
1489static bool isInLoop(const ASTContext &Ctx, const ParentMap &PM,
1490 const Stmt *S) {
1491 assert(S);
1492
1493 do {
1494 switch (S->getStmtClass()) {
1495 case Stmt::ForStmtClass:
1496 case Stmt::WhileStmtClass:
1497 case Stmt::CXXForRangeStmtClass:
1498 case Stmt::ObjCForCollectionStmtClass:
1499 return true;
1500 case Stmt::DoStmtClass: {
1501 Expr::EvalResult Result;
1502 if (!cast<DoStmt>(Val: S)->getCond()->EvaluateAsInt(Result, Ctx))
1503 return true;
1504 return Result.Val.getInt().getBoolValue();
1505 }
1506 default:
1507 break;
1508 }
1509 } while ((S = PM.getParent(S)));
1510
1511 return false;
1512}
1513
/// Emit -Warc-repeated-use-of-weak diagnostics: warn when a weak object is
/// read more than once (or once inside a loop) within \p CurFn, since each
/// read may observe a different value. The first read of each object gets
/// the warning; every other access is reported as a note.
static void diagnoseRepeatedUseOfWeak(Sema &S,
                                      const sema::FunctionScopeInfo *CurFn,
                                      const Decl *D,
                                      const ParentMap &PM) {
  typedef sema::FunctionScopeInfo::WeakObjectProfileTy WeakObjectProfileTy;
  typedef sema::FunctionScopeInfo::WeakObjectUseMap WeakObjectUseMap;
  typedef sema::FunctionScopeInfo::WeakUseVector WeakUseVector;
  typedef std::pair<const Stmt *, WeakObjectUseMap::const_iterator>
      StmtUsesPair;

  ASTContext &Ctx = S.getASTContext();

  const WeakObjectUseMap &WeakMap = CurFn->getWeakObjectUses();

  // Extract all weak objects that are referenced more than once.
  SmallVector<StmtUsesPair, 8> UsesByStmt;
  for (WeakObjectUseMap::const_iterator I = WeakMap.begin(), E = WeakMap.end();
       I != E; ++I) {
    const WeakUseVector &Uses = I->second;

    // Find the first read of the weak object.
    WeakUseVector::const_iterator UI = Uses.begin(), UE = Uses.end();
    for ( ; UI != UE; ++UI) {
      if (UI->isUnsafe())
        break;
    }

    // If there were only writes to this object, don't warn.
    if (UI == UE)
      continue;

    // If there was only one read, followed by any number of writes, and the
    // read is not within a loop, don't warn. Additionally, don't warn in a
    // loop if the base object is a local variable -- local variables are often
    // changed in loops.
    if (UI == Uses.begin()) {
      WeakUseVector::const_iterator UI2 = UI;
      for (++UI2; UI2 != UE; ++UI2)
        if (UI2->isUnsafe())
          break;

      if (UI2 == UE) {
        if (!isInLoop(Ctx, PM, S: UI->getUseExpr()))
          continue;

        const WeakObjectProfileTy &Profile = I->first;
        if (!Profile.isExactProfile())
          continue;

        const NamedDecl *Base = Profile.getBase();
        if (!Base)
          Base = Profile.getProperty();
        assert(Base && "A profile always has a base or property.");

        if (const VarDecl *BaseVar = dyn_cast<VarDecl>(Val: Base))
          if (BaseVar->hasLocalStorage() && !isa<ParmVarDecl>(Val: Base))
            continue;
      }
    }

    UsesByStmt.push_back(Elt: StmtUsesPair(UI->getUseExpr(), I));
  }

  if (UsesByStmt.empty())
    return;

  // Sort by first use so that we emit the warnings in a deterministic order.
  SourceManager &SM = S.getSourceManager();
  llvm::sort(C&: UsesByStmt,
             Comp: [&SM](const StmtUsesPair &LHS, const StmtUsesPair &RHS) {
               return SM.isBeforeInTranslationUnit(LHS: LHS.first->getBeginLoc(),
                                                   RHS: RHS.first->getBeginLoc());
             });

  // Classify the current code body for better warning text.
  // This enum should stay in sync with the cases in
  // warn_arc_repeated_use_of_weak and warn_arc_possible_repeated_use_of_weak.
  // FIXME: Should we use a common classification enum and the same set of
  // possibilities all throughout Sema?
  enum {
    Function,
    Method,
    Block,
    Lambda
  } FunctionKind;

  if (isa<sema::BlockScopeInfo>(Val: CurFn))
    FunctionKind = Block;
  else if (isa<sema::LambdaScopeInfo>(Val: CurFn))
    FunctionKind = Lambda;
  else if (isa<ObjCMethodDecl>(Val: D))
    FunctionKind = Method;
  else
    FunctionKind = Function;

  // Iterate through the sorted problems and emit warnings for each.
  for (const auto &P : UsesByStmt) {
    const Stmt *FirstRead = P.first;
    const WeakObjectProfileTy &Key = P.second->first;
    const WeakUseVector &Uses = P.second->second;

    // For complicated expressions like 'a.b.c' and 'x.b.c', WeakObjectProfileTy
    // may not contain enough information to determine that these are different
    // properties. We can only be 100% sure of a repeated use in certain cases,
    // and we adjust the diagnostic kind accordingly so that the less certain
    // case can be turned off if it is too noisy.
    unsigned DiagKind;
    if (Key.isExactProfile())
      DiagKind = diag::warn_arc_repeated_use_of_weak;
    else
      DiagKind = diag::warn_arc_possible_repeated_use_of_weak;

    // Classify the weak object being accessed for better warning text.
    // This enum should stay in sync with the cases in
    // warn_arc_repeated_use_of_weak and warn_arc_possible_repeated_use_of_weak.
    enum {
      Variable,
      Property,
      ImplicitProperty,
      Ivar
    } ObjectKind;

    const NamedDecl *KeyProp = Key.getProperty();
    if (isa<VarDecl>(Val: KeyProp))
      ObjectKind = Variable;
    else if (isa<ObjCPropertyDecl>(Val: KeyProp))
      ObjectKind = Property;
    else if (isa<ObjCMethodDecl>(Val: KeyProp))
      ObjectKind = ImplicitProperty;
    else if (isa<ObjCIvarDecl>(Val: KeyProp))
      ObjectKind = Ivar;
    else
      llvm_unreachable("Unexpected weak object kind!");

    // Do not warn about IBOutlet weak property receivers being set to null
    // since they are typically only used from the main thread.
    if (const ObjCPropertyDecl *Prop = dyn_cast<ObjCPropertyDecl>(Val: KeyProp))
      if (Prop->hasAttr<IBOutletAttr>())
        continue;

    // Show the first time the object was read.
    S.Diag(Loc: FirstRead->getBeginLoc(), DiagID: DiagKind)
        << int(ObjectKind) << KeyProp << int(FunctionKind)
        << FirstRead->getSourceRange();

    // Print all the other accesses as notes.
    for (const auto &Use : Uses) {
      if (Use.getUseExpr() == FirstRead)
        continue;
      S.Diag(Loc: Use.getUseExpr()->getBeginLoc(),
             DiagID: diag::note_arc_weak_also_accessed_here)
          << Use.getUseExpr()->getSourceRange();
    }
  }
}
1669
1670namespace clang {
1671namespace {
1672typedef SmallVector<PartialDiagnosticAt, 1> OptionalNotes;
1673typedef std::pair<PartialDiagnosticAt, OptionalNotes> DelayedDiag;
1674typedef std::list<DelayedDiag> DiagList;
1675
1676struct SortDiagBySourceLocation {
1677 SourceManager &SM;
1678 SortDiagBySourceLocation(SourceManager &SM) : SM(SM) {}
1679
1680 bool operator()(const DelayedDiag &left, const DelayedDiag &right) {
1681 // Although this call will be slow, this is only called when outputting
1682 // multiple warnings.
1683 return SM.isBeforeInTranslationUnit(LHS: left.first.first, RHS: right.first.first);
1684 }
1685};
1686} // anonymous namespace
1687} // namespace clang
1688
1689namespace {
1690class UninitValsDiagReporter : public UninitVariablesHandler {
1691 Sema &S;
1692 typedef SmallVector<UninitUse, 2> UsesVec;
1693 typedef llvm::PointerIntPair<UsesVec *, 1, bool> MappedType;
1694 // Prefer using MapVector to DenseMap, so that iteration order will be
1695 // the same as insertion order. This is needed to obtain a deterministic
1696 // order of diagnostics when calling flushDiagnostics().
1697 typedef llvm::MapVector<const VarDecl *, MappedType> UsesMap;
1698 UsesMap uses;
1699
1700public:
1701 UninitValsDiagReporter(Sema &S) : S(S) {}
1702 ~UninitValsDiagReporter() override { flushDiagnostics(); }
1703
1704 MappedType &getUses(const VarDecl *vd) {
1705 MappedType &V = uses[vd];
1706 if (!V.getPointer())
1707 V.setPointer(new UsesVec());
1708 return V;
1709 }
1710
1711 void handleUseOfUninitVariable(const VarDecl *vd,
1712 const UninitUse &use) override {
1713 getUses(vd).getPointer()->push_back(Elt: use);
1714 }
1715
1716 void handleSelfInit(const VarDecl *vd) override { getUses(vd).setInt(true); }
1717
1718 void flushDiagnostics() {
1719 for (const auto &P : uses) {
1720 const VarDecl *vd = P.first;
1721 const MappedType &V = P.second;
1722
1723 UsesVec *vec = V.getPointer();
1724 bool hasSelfInit = V.getInt();
1725
1726 diagnoseUnitializedVar(vd, hasSelfInit, vec);
1727
1728 // Release the uses vector.
1729 delete vec;
1730 }
1731
1732 uses.clear();
1733 }
1734
1735private:
1736 static bool hasAlwaysUninitializedUse(const UsesVec* vec) {
1737 return llvm::any_of(Range: *vec, P: [](const UninitUse &U) {
1738 return U.getKind() == UninitUse::Always ||
1739 U.getKind() == UninitUse::AfterCall ||
1740 U.getKind() == UninitUse::AfterDecl;
1741 });
1742 }
1743
  // Print the diagnostic for the variable. We try to warn only on the first
  // point at which a variable is used uninitialized. After the first
  // diagnostic is printed, further diagnostics for this variable are skipped.
  //
  // \param vd          the uninitialized variable.
  // \param hasSelfInit true if the variable's initializer references the
  //                    variable itself (e.g. `int x = x;`).
  // \param vec         all recorded uninitialized uses of \p vd.
  void diagnoseUnitializedVar(const VarDecl *vd, bool hasSelfInit,
                              UsesVec *vec) {
    // Specially handle the case where we have uses of an uninitialized
    // variable, but the root cause is an idiomatic self-init. We want
    // to report the diagnostic at the self-init since that is the root cause.
    if (hasSelfInit && hasAlwaysUninitializedUse(vec)) {
      if (DiagnoseUninitializedUse(S, VD: vd,
                                   Use: UninitUse(vd->getInit()->IgnoreParenCasts(),
                                             /*isAlwaysUninit=*/true),
                                   /*alwaysReportSelfInit=*/true))
        return;
    }

    // Sort the uses by their SourceLocations. While not strictly
    // guaranteed to produce them in line/column order, this will provide
    // a stable ordering.
    llvm::sort(C&: *vec, Comp: [](const UninitUse &a, const UninitUse &b) {
      // Prefer the direct use of an uninitialized variable over its use via
      // constant reference or pointer.
      if (a.isConstRefOrPtrUse() != b.isConstRefOrPtrUse())
        return b.isConstRefOrPtrUse();
      // Prefer a more confident report over a less confident one.
      if (a.getKind() != b.getKind())
        return a.getKind() > b.getKind();
      return a.getUser()->getBeginLoc() < b.getUser()->getBeginLoc();
    });

    // Emit at most one diagnostic: each Diagnose* helper returns true once a
    // diagnostic was actually issued, at which point we stop.
    for (const auto &U : *vec) {
      if (U.isConstRefUse()) {
        if (DiagnoseUninitializedConstRefUse(S, VD: vd, Use: U))
          return;
      } else if (U.isConstPtrUse()) {
        if (DiagnoseUninitializedConstPtrUse(S, VD: vd, Use: U))
          return;
      } else {
        // If we have self-init, downgrade all uses to 'may be uninitialized'.
        UninitUse Use = hasSelfInit ? UninitUse(U.getUser(), false) : U;
        if (DiagnoseUninitializedUse(S, VD: vd, Use))
          return;
      }
    }
  }
1789};
1790
1791/// Inter-procedural data for the called-once checker.
1792class CalledOnceInterProceduralData {
1793public:
1794 // Add the delayed warning for the given block.
1795 void addDelayedWarning(const BlockDecl *Block,
1796 PartialDiagnosticAt &&Warning) {
1797 DelayedBlockWarnings[Block].emplace_back(Args: std::move(Warning));
1798 }
1799 // Report all of the warnings we've gathered for the given block.
1800 void flushWarnings(const BlockDecl *Block, Sema &S) {
1801 for (const PartialDiagnosticAt &Delayed : DelayedBlockWarnings[Block])
1802 S.Diag(Loc: Delayed.first, PD: Delayed.second);
1803
1804 discardWarnings(Block);
1805 }
1806 // Discard all of the warnings we've gathered for the given block.
1807 void discardWarnings(const BlockDecl *Block) {
1808 DelayedBlockWarnings.erase(Val: Block);
1809 }
1810
1811private:
1812 using DelayedDiagnostics = SmallVector<PartialDiagnosticAt, 2>;
1813 llvm::DenseMap<const BlockDecl *, DelayedDiagnostics> DelayedBlockWarnings;
1814};
1815
// Translates called-once analysis callbacks into Sema diagnostics.  Each
// handler picks the completion-handler or generic called-once wording
// depending on IsCompletionHandler.  Warnings arising inside blocks are
// delayed through CalledOnceInterProceduralData, since whether they are real
// depends on guarantees about the block discovered later.
class CalledOnceCheckReporter : public CalledOnceCheckHandler {
public:
  CalledOnceCheckReporter(Sema &S, CalledOnceInterProceduralData &Data)
      : S(S), Data(Data) {}
  // A `called_once` parameter was invoked twice; note the earlier call.
  void handleDoubleCall(const ParmVarDecl *Parameter, const Expr *Call,
                        const Expr *PrevCall, bool IsCompletionHandler,
                        bool Poised) override {
    auto DiagToReport = IsCompletionHandler
                            ? diag::warn_completion_handler_called_twice
                            : diag::warn_called_once_gets_called_twice;
    S.Diag(Loc: Call->getBeginLoc(), DiagID: DiagToReport) << Parameter;
    S.Diag(Loc: PrevCall->getBeginLoc(), DiagID: diag::note_called_once_gets_called_twice)
        << Poised;
  }

  // The parameter was never called on any path.
  void handleNeverCalled(const ParmVarDecl *Parameter,
                         bool IsCompletionHandler) override {
    auto DiagToReport = IsCompletionHandler
                            ? diag::warn_completion_handler_never_called
                            : diag::warn_called_once_never_called;
    S.Diag(Loc: Parameter->getBeginLoc(), DiagID: DiagToReport)
        << Parameter << /* Captured */ false;
  }

  // The parameter is not called on some specific path (described by Reason,
  // anchored at Where).  If the enclosing declaration is a block, the warning
  // is delayed until we know whether the block is guaranteed to run once.
  void handleNeverCalled(const ParmVarDecl *Parameter, const Decl *Function,
                         const Stmt *Where, NeverCalledReason Reason,
                         bool IsCalledDirectly,
                         bool IsCompletionHandler) override {
    auto DiagToReport = IsCompletionHandler
                            ? diag::warn_completion_handler_never_called_when
                            : diag::warn_called_once_never_called_when;
    PartialDiagnosticAt Warning(Where->getBeginLoc(), S.PDiag(DiagID: DiagToReport)
                                                          << Parameter
                                                          << IsCalledDirectly
                                                          << (unsigned)Reason);

    if (const auto *Block = dyn_cast<BlockDecl>(Val: Function)) {
      // We shouldn't report these warnings on blocks immediately
      Data.addDelayedWarning(Block, Warning: std::move(Warning));
    } else {
      S.Diag(Loc: Warning.first, PD: Warning.second);
    }
  }

  // The parameter escaped into a capture (at Where) and was never called.
  void handleCapturedNeverCalled(const ParmVarDecl *Parameter,
                                 const Decl *Where,
                                 bool IsCompletionHandler) override {
    auto DiagToReport = IsCompletionHandler
                            ? diag::warn_completion_handler_never_called
                            : diag::warn_called_once_never_called;
    S.Diag(Loc: Where->getBeginLoc(), DiagID: DiagToReport)
        << Parameter << /* Captured */ true;
  }

  // The block is known to run exactly once: its delayed warnings are real.
  void
  handleBlockThatIsGuaranteedToBeCalledOnce(const BlockDecl *Block) override {
    Data.flushWarnings(Block, S);
  }

  // No guarantee about the block: drop its delayed warnings.
  void handleBlockWithNoGuarantees(const BlockDecl *Block) override {
    Data.discardWarnings(Block);
  }

private:
  Sema &S;
  CalledOnceInterProceduralData &Data;
};
1883
// Diagnostic IDs the called-once analysis can produce, used below to decide
// whether running the analysis is worthwhile at a given location.
constexpr unsigned CalledOnceWarnings[] = {
    diag::warn_called_once_never_called,
    diag::warn_called_once_never_called_when,
    diag::warn_called_once_gets_called_twice};

// Same, for the Objective-C completion-handler convention checks.
constexpr unsigned CompletionHandlerWarnings[]{
    diag::warn_completion_handler_never_called,
    diag::warn_completion_handler_never_called_when,
    diag::warn_completion_handler_called_twice};
1893
1894bool shouldAnalyzeCalledOnceImpl(llvm::ArrayRef<unsigned> DiagIDs,
1895 const DiagnosticsEngine &Diags,
1896 SourceLocation At) {
1897 return llvm::any_of(Range&: DiagIDs, P: [&Diags, At](unsigned DiagID) {
1898 return !Diags.isIgnored(DiagID, Loc: At);
1899 });
1900}
1901
// Should the completion-handler convention checks run at this location?
bool shouldAnalyzeCalledOnceConventions(const DiagnosticsEngine &Diags,
                                        SourceLocation At) {
  return shouldAnalyzeCalledOnceImpl(DiagIDs: CompletionHandlerWarnings, Diags, At);
}
1906
// Should the called-once analysis run at all?  True if either the explicit
// `called_once` diagnostics or the convention-based ones are enabled.
bool shouldAnalyzeCalledOnceParameters(const DiagnosticsEngine &Diags,
                                       SourceLocation At) {
  return shouldAnalyzeCalledOnceImpl(DiagIDs: CalledOnceWarnings, Diags, At) ||
         shouldAnalyzeCalledOnceConventions(Diags, At);
}
1912} // anonymous namespace
1913
1914//===----------------------------------------------------------------------===//
1915// -Wthread-safety
1916//===----------------------------------------------------------------------===//
1917namespace clang {
1918namespace threadSafety {
1919namespace {
// Buffers all -Wthread-safety diagnostics produced while the analysis runs
// and emits them, sorted by source location, via emitDiagnostics().  Sorting
// is required because diagnostics are generated by iterating locksets, whose
// order is not deterministic.
class ThreadSafetyReporter : public clang::threadSafety::ThreadSafetyHandler {
  Sema &S;
  DiagList Warnings;
  // Fallback locations for diagnostics whose own location is invalid.
  SourceLocation FunLocation, FunEndLocation;

  // Function currently being analyzed; only used for the verbose-mode
  // "warning occurred in function X" notes below.
  const FunctionDecl *CurrentFunction;
  bool Verbose;

  // Build the note list for a warning with no notes of its own.  In verbose
  // mode this still attaches a note pointing at the enclosing function.
  OptionalNotes getNotes() const {
    if (Verbose && CurrentFunction) {
      PartialDiagnosticAt FNote(CurrentFunction->getBody()->getBeginLoc(),
                                S.PDiag(DiagID: diag::note_thread_warning_in_fun)
                                    << CurrentFunction);
      return OptionalNotes(1, FNote);
    }
    return OptionalNotes();
  }

  // One explicit note, plus the verbose-mode function note if enabled.
  OptionalNotes getNotes(const PartialDiagnosticAt &Note) const {
    OptionalNotes ONS(1, Note);
    if (Verbose && CurrentFunction) {
      PartialDiagnosticAt FNote(CurrentFunction->getBody()->getBeginLoc(),
                                S.PDiag(DiagID: diag::note_thread_warning_in_fun)
                                    << CurrentFunction);
      ONS.push_back(Elt: std::move(FNote));
    }
    return ONS;
  }

  // Two explicit notes, plus the verbose-mode function note if enabled.
  OptionalNotes getNotes(const PartialDiagnosticAt &Note1,
                         const PartialDiagnosticAt &Note2) const {
    OptionalNotes ONS;
    ONS.push_back(Elt: Note1);
    ONS.push_back(Elt: Note2);
    if (Verbose && CurrentFunction) {
      PartialDiagnosticAt FNote(CurrentFunction->getBody()->getBeginLoc(),
                                S.PDiag(DiagID: diag::note_thread_warning_in_fun)
                                    << CurrentFunction);
      ONS.push_back(Elt: std::move(FNote));
    }
    return ONS;
  }

  // "Mutex acquired here" note; falls back to the plain note list when the
  // acquisition site is unknown.
  OptionalNotes makeLockedHereNote(SourceLocation LocLocked, StringRef Kind) {
    return LocLocked.isValid()
               ? getNotes(Note: PartialDiagnosticAt(
                     LocLocked, S.PDiag(DiagID: diag::note_locked_here) << Kind))
               : getNotes();
  }

  // "Mutex released here" note, with the same fallback behavior.
  OptionalNotes makeUnlockedHereNote(SourceLocation LocUnlocked,
                                     StringRef Kind) {
    return LocUnlocked.isValid()
               ? getNotes(Note: PartialDiagnosticAt(
                     LocUnlocked, S.PDiag(DiagID: diag::note_unlocked_here) << Kind))
               : getNotes();
  }

  // Note pointing at the scoped-capability parameter whose underlying
  // mutexes did not match expectations.
  OptionalNotes makeManagedMismatchNoteForParam(SourceLocation DeclLoc) {
    return DeclLoc.isValid()
               ? getNotes(Note: PartialDiagnosticAt(
                     DeclLoc,
                     S.PDiag(DiagID: diag::note_managed_mismatch_here_for_param)))
               : getNotes();
  }

 public:
  ThreadSafetyReporter(Sema &S, SourceLocation FL, SourceLocation FEL)
    : S(S), FunLocation(FL), FunEndLocation(FEL),
      CurrentFunction(nullptr), Verbose(false) {}

  void setVerbose(bool b) { Verbose = b; }

  /// Emit all buffered diagnostics in order of source location.
  /// We need to output diagnostics produced while iterating through
  /// the lockset in deterministic order, so this function orders diagnostics
  /// and outputs them.
  void emitDiagnostics() {
    Warnings.sort(comp: SortDiagBySourceLocation(S.getSourceManager()));
    for (const auto &Diag : Warnings) {
      S.Diag(Loc: Diag.first.first, PD: Diag.first.second);
      for (const auto &Note : Diag.second)
        S.Diag(Loc: Note.first, PD: Note.second);
    }
  }

  void handleUnmatchedUnderlyingMutexes(SourceLocation Loc, SourceLocation DLoc,
                                        Name scopeName, StringRef Kind,
                                        Name expected, Name actual) override {
    PartialDiagnosticAt Warning(Loc,
                                S.PDiag(DiagID: diag::warn_unmatched_underlying_mutexes)
                                    << Kind << scopeName << expected << actual);
    Warnings.emplace_back(args: std::move(Warning),
                          args: makeManagedMismatchNoteForParam(DeclLoc: DLoc));
  }

  void handleExpectMoreUnderlyingMutexes(SourceLocation Loc,
                                         SourceLocation DLoc, Name scopeName,
                                         StringRef Kind,
                                         Name expected) override {
    PartialDiagnosticAt Warning(
        Loc, S.PDiag(DiagID: diag::warn_expect_more_underlying_mutexes)
                 << Kind << scopeName << expected);
    Warnings.emplace_back(args: std::move(Warning),
                          args: makeManagedMismatchNoteForParam(DeclLoc: DLoc));
  }

  void handleExpectFewerUnderlyingMutexes(SourceLocation Loc,
                                          SourceLocation DLoc, Name scopeName,
                                          StringRef Kind,
                                          Name actual) override {
    PartialDiagnosticAt Warning(
        Loc, S.PDiag(DiagID: diag::warn_expect_fewer_underlying_mutexes)
                 << Kind << scopeName << actual);
    Warnings.emplace_back(args: std::move(Warning),
                          args: makeManagedMismatchNoteForParam(DeclLoc: DLoc));
  }

  void handleInvalidLockExp(SourceLocation Loc) override {
    PartialDiagnosticAt Warning(Loc, S.PDiag(DiagID: diag::warn_cannot_resolve_lock)
                                         << Loc);
    Warnings.emplace_back(args: std::move(Warning), args: getNotes());
  }

  void handleUnmatchedUnlock(StringRef Kind, Name LockName, SourceLocation Loc,
                             SourceLocation LocPreviousUnlock) override {
    if (Loc.isInvalid())
      Loc = FunLocation;
    PartialDiagnosticAt Warning(Loc, S.PDiag(DiagID: diag::warn_unlock_but_no_lock)
                                         << Kind << LockName);
    Warnings.emplace_back(args: std::move(Warning),
                          args: makeUnlockedHereNote(LocUnlocked: LocPreviousUnlock, Kind));
  }

  void handleIncorrectUnlockKind(StringRef Kind, Name LockName,
                                 LockKind Expected, LockKind Received,
                                 SourceLocation LocLocked,
                                 SourceLocation LocUnlock) override {
    if (LocUnlock.isInvalid())
      LocUnlock = FunLocation;
    PartialDiagnosticAt Warning(
        LocUnlock, S.PDiag(DiagID: diag::warn_unlock_kind_mismatch)
                       << Kind << LockName << Received << Expected);
    Warnings.emplace_back(args: std::move(Warning),
                          args: makeLockedHereNote(LocLocked, Kind));
  }

  void handleDoubleLock(StringRef Kind, Name LockName, SourceLocation LocLocked,
                        SourceLocation LocDoubleLock) override {
    if (LocDoubleLock.isInvalid())
      LocDoubleLock = FunLocation;
    PartialDiagnosticAt Warning(LocDoubleLock, S.PDiag(DiagID: diag::warn_double_lock)
                                                   << Kind << LockName);
    Warnings.emplace_back(args: std::move(Warning),
                          args: makeLockedHereNote(LocLocked, Kind));
  }

  // A capability is (or is not) held at the end of a scope/loop/function
  // when the opposite was expected; LEK selects the precise wording.
  void handleMutexHeldEndOfScope(StringRef Kind, Name LockName,
                                 SourceLocation LocLocked,
                                 SourceLocation LocEndOfScope,
                                 LockErrorKind LEK,
                                 bool ReentrancyMismatch) override {
    unsigned DiagID = 0;
    switch (LEK) {
      case LEK_LockedSomePredecessors:
        DiagID = diag::warn_lock_some_predecessors;
        break;
      case LEK_LockedSomeLoopIterations:
        DiagID = diag::warn_expecting_lock_held_on_loop;
        break;
      case LEK_LockedAtEndOfFunction:
        DiagID = diag::warn_no_unlock;
        break;
      case LEK_NotLockedAtEndOfFunction:
        DiagID = diag::warn_expecting_locked;
        break;
    }
    if (LocEndOfScope.isInvalid())
      LocEndOfScope = FunEndLocation;

    PartialDiagnosticAt Warning(LocEndOfScope, S.PDiag(DiagID)
                                                   << Kind << LockName
                                                   << ReentrancyMismatch);
    Warnings.emplace_back(args: std::move(Warning),
                          args: makeLockedHereNote(LocLocked, Kind));
  }

  void handleExclusiveAndShared(StringRef Kind, Name LockName,
                                SourceLocation Loc1,
                                SourceLocation Loc2) override {
    PartialDiagnosticAt Warning(Loc1,
                                S.PDiag(DiagID: diag::warn_lock_exclusive_and_shared)
                                    << Kind << LockName);
    PartialDiagnosticAt Note(Loc2, S.PDiag(DiagID: diag::note_lock_exclusive_and_shared)
                                       << Kind << LockName);
    Warnings.emplace_back(args: std::move(Warning), args: getNotes(Note));
  }

  // Guarded variable accessed while holding no capability at all.
  void handleNoMutexHeld(const NamedDecl *D, ProtectedOperationKind POK,
                         AccessKind AK, SourceLocation Loc) override {
    unsigned DiagID = 0;
    switch (POK) {
      case POK_VarAccess:
      case POK_PassByRef:
      case POK_ReturnByRef:
      case POK_PassPointer:
      case POK_ReturnPointer:
        DiagID = diag::warn_variable_requires_any_lock;
        break;
      case POK_VarDereference:
      case POK_PtPassByRef:
      case POK_PtReturnByRef:
      case POK_PtPassPointer:
      case POK_PtReturnPointer:
        DiagID = diag::warn_var_deref_requires_any_lock;
        break;
      case POK_FunctionCall:
        llvm_unreachable("Only works for variables");
        break;
    }
    PartialDiagnosticAt Warning(Loc, S.PDiag(DiagID)
                                         << D << getLockKindFromAccessKind(AK));
    Warnings.emplace_back(args: std::move(Warning), args: getNotes());
  }

  // `guarded_by(any_of(...))` violation: none of the listed capabilities is
  // held; the lock names are joined into a single quoted, comma-separated
  // string for the diagnostic.
  void handleGuardedByAnyReadNotHeld(const NamedDecl *D,
                                     ProtectedOperationKind POK,
                                     ArrayRef<StringRef> LockNames,
                                     SourceLocation Loc) override {
    bool IsDeref;
    switch (POK) {
    case POK_VarAccess:
    case POK_PassByRef:
    case POK_ReturnByRef:
    case POK_PassPointer:
    case POK_ReturnPointer:
      IsDeref = false;
      break;
    case POK_VarDereference:
    case POK_PtPassByRef:
    case POK_PtReturnByRef:
    case POK_PtPassPointer:
    case POK_PtReturnPointer:
      IsDeref = true;
      break;
    case POK_FunctionCall:
      llvm_unreachable("POK_FunctionCall not applicable here");
    }
    std::string Quoted;
    llvm::raw_string_ostream OS(Quoted);
    llvm::ListSeparator LS;
    for (StringRef Name : LockNames)
      OS << LS << "'" << Name << "'";
    PartialDiagnosticAt Warning(Loc, S.PDiag(DiagID: diag::warn_requires_any_of_locks)
                                         << D << IsDeref << Quoted);
    Warnings.emplace_back(args: std::move(Warning), args: getNotes());
  }

  // Required capability not held.  When the analysis found a near-match
  // (PossibleMatch), use the "precise" diagnostics and attach a note naming
  // the candidate; otherwise use the generic ones.
  void handleMutexNotHeld(StringRef Kind, const NamedDecl *D,
                          ProtectedOperationKind POK, Name LockName,
                          LockKind LK, SourceLocation Loc,
                          Name *PossibleMatch) override {
    unsigned DiagID = 0;
    if (PossibleMatch) {
      switch (POK) {
        case POK_VarAccess:
          DiagID = diag::warn_variable_requires_lock_precise;
          break;
        case POK_VarDereference:
          DiagID = diag::warn_var_deref_requires_lock_precise;
          break;
        case POK_FunctionCall:
          DiagID = diag::warn_fun_requires_lock_precise;
          break;
        case POK_PassByRef:
          DiagID = diag::warn_guarded_pass_by_reference;
          break;
        case POK_PtPassByRef:
          DiagID = diag::warn_pt_guarded_pass_by_reference;
          break;
        case POK_ReturnByRef:
          DiagID = diag::warn_guarded_return_by_reference;
          break;
        case POK_PtReturnByRef:
          DiagID = diag::warn_pt_guarded_return_by_reference;
          break;
        case POK_PassPointer:
          DiagID = diag::warn_guarded_pass_pointer;
          break;
        case POK_PtPassPointer:
          DiagID = diag::warn_pt_guarded_pass_pointer;
          break;
        case POK_ReturnPointer:
          DiagID = diag::warn_guarded_return_pointer;
          break;
        case POK_PtReturnPointer:
          DiagID = diag::warn_pt_guarded_return_pointer;
          break;
      }
      PartialDiagnosticAt Warning(Loc, S.PDiag(DiagID) << Kind
                                                       << D
                                                       << LockName << LK);
      PartialDiagnosticAt Note(Loc, S.PDiag(DiagID: diag::note_found_mutex_near_match)
                                        << *PossibleMatch);
      if (Verbose && POK == POK_VarAccess) {
        PartialDiagnosticAt VNote(D->getLocation(),
                                  S.PDiag(DiagID: diag::note_guarded_by_declared_here)
                                      << D->getDeclName());
        Warnings.emplace_back(args: std::move(Warning), args: getNotes(Note1: Note, Note2: VNote));
      } else
        Warnings.emplace_back(args: std::move(Warning), args: getNotes(Note));
    } else {
      switch (POK) {
        case POK_VarAccess:
          DiagID = diag::warn_variable_requires_lock;
          break;
        case POK_VarDereference:
          DiagID = diag::warn_var_deref_requires_lock;
          break;
        case POK_FunctionCall:
          DiagID = diag::warn_fun_requires_lock;
          break;
        case POK_PassByRef:
          DiagID = diag::warn_guarded_pass_by_reference;
          break;
        case POK_PtPassByRef:
          DiagID = diag::warn_pt_guarded_pass_by_reference;
          break;
        case POK_ReturnByRef:
          DiagID = diag::warn_guarded_return_by_reference;
          break;
        case POK_PtReturnByRef:
          DiagID = diag::warn_pt_guarded_return_by_reference;
          break;
        case POK_PassPointer:
          DiagID = diag::warn_guarded_pass_pointer;
          break;
        case POK_PtPassPointer:
          DiagID = diag::warn_pt_guarded_pass_pointer;
          break;
        case POK_ReturnPointer:
          DiagID = diag::warn_guarded_return_pointer;
          break;
        case POK_PtReturnPointer:
          DiagID = diag::warn_pt_guarded_return_pointer;
          break;
      }
      PartialDiagnosticAt Warning(Loc, S.PDiag(DiagID) << Kind
                                                       << D
                                                       << LockName << LK);
      if (Verbose && POK == POK_VarAccess) {
        PartialDiagnosticAt Note(D->getLocation(),
                                 S.PDiag(DiagID: diag::note_guarded_by_declared_here));
        Warnings.emplace_back(args: std::move(Warning), args: getNotes(Note));
      } else
        Warnings.emplace_back(args: std::move(Warning), args: getNotes());
    }
  }

  void handleNegativeNotHeld(StringRef Kind, Name LockName, Name Neg,
                             SourceLocation Loc) override {
    PartialDiagnosticAt Warning(Loc,
                                S.PDiag(DiagID: diag::warn_acquire_requires_negative_cap)
                                    << Kind << LockName << Neg);
    Warnings.emplace_back(args: std::move(Warning), args: getNotes());
  }

  void handleNegativeNotHeld(const NamedDecl *D, Name LockName,
                             SourceLocation Loc) override {
    PartialDiagnosticAt Warning(
        Loc, S.PDiag(DiagID: diag::warn_fun_requires_negative_cap) << D << LockName);
    Warnings.emplace_back(args: std::move(Warning), args: getNotes());
  }

  void handleFunExcludesLock(StringRef Kind, Name FunName, Name LockName,
                             SourceLocation Loc) override {
    PartialDiagnosticAt Warning(Loc, S.PDiag(DiagID: diag::warn_fun_excludes_mutex)
                                         << Kind << FunName << LockName);
    Warnings.emplace_back(args: std::move(Warning), args: getNotes());
  }

  void handleLockAcquiredBefore(StringRef Kind, Name L1Name, Name L2Name,
                                SourceLocation Loc) override {
    PartialDiagnosticAt Warning(Loc,
      S.PDiag(DiagID: diag::warn_acquired_before) << Kind << L1Name << L2Name);
    Warnings.emplace_back(args: std::move(Warning), args: getNotes());
  }

  void handleBeforeAfterCycle(Name L1Name, SourceLocation Loc) override {
    PartialDiagnosticAt Warning(Loc,
      S.PDiag(DiagID: diag::warn_acquired_before_after_cycle) << L1Name);
    Warnings.emplace_back(args: std::move(Warning), args: getNotes());
  }

  // Track the function being analyzed for the verbose-mode notes.
  void enterFunction(const FunctionDecl* FD) override {
    CurrentFunction = FD;
  }

  void leaveFunction(const FunctionDecl* FD) override {
    CurrentFunction = nullptr;
  }
};
2322} // anonymous namespace
2323} // namespace threadSafety
2324} // namespace clang
2325
2326//===----------------------------------------------------------------------===//
2327// -Wconsumed
2328//===----------------------------------------------------------------------===//
2329
2330namespace clang {
2331namespace consumed {
2332namespace {
2333class ConsumedWarningsHandler : public ConsumedWarningsHandlerBase {
2334
2335 Sema &S;
2336 DiagList Warnings;
2337
2338public:
2339
2340 ConsumedWarningsHandler(Sema &S) : S(S) {}
2341
2342 void emitDiagnostics() override {
2343 Warnings.sort(comp: SortDiagBySourceLocation(S.getSourceManager()));
2344 for (const auto &Diag : Warnings) {
2345 S.Diag(Loc: Diag.first.first, PD: Diag.first.second);
2346 for (const auto &Note : Diag.second)
2347 S.Diag(Loc: Note.first, PD: Note.second);
2348 }
2349 }
2350
2351 void warnLoopStateMismatch(SourceLocation Loc,
2352 StringRef VariableName) override {
2353 PartialDiagnosticAt Warning(Loc, S.PDiag(DiagID: diag::warn_loop_state_mismatch) <<
2354 VariableName);
2355
2356 Warnings.emplace_back(args: std::move(Warning), args: OptionalNotes());
2357 }
2358
2359 void warnParamReturnTypestateMismatch(SourceLocation Loc,
2360 StringRef VariableName,
2361 StringRef ExpectedState,
2362 StringRef ObservedState) override {
2363
2364 PartialDiagnosticAt Warning(Loc, S.PDiag(
2365 DiagID: diag::warn_param_return_typestate_mismatch) << VariableName <<
2366 ExpectedState << ObservedState);
2367
2368 Warnings.emplace_back(args: std::move(Warning), args: OptionalNotes());
2369 }
2370
2371 void warnParamTypestateMismatch(SourceLocation Loc, StringRef ExpectedState,
2372 StringRef ObservedState) override {
2373
2374 PartialDiagnosticAt Warning(Loc, S.PDiag(
2375 DiagID: diag::warn_param_typestate_mismatch) << ExpectedState << ObservedState);
2376
2377 Warnings.emplace_back(args: std::move(Warning), args: OptionalNotes());
2378 }
2379
2380 void warnReturnTypestateForUnconsumableType(SourceLocation Loc,
2381 StringRef TypeName) override {
2382 PartialDiagnosticAt Warning(Loc, S.PDiag(
2383 DiagID: diag::warn_return_typestate_for_unconsumable_type) << TypeName);
2384
2385 Warnings.emplace_back(args: std::move(Warning), args: OptionalNotes());
2386 }
2387
2388 void warnReturnTypestateMismatch(SourceLocation Loc, StringRef ExpectedState,
2389 StringRef ObservedState) override {
2390
2391 PartialDiagnosticAt Warning(Loc, S.PDiag(
2392 DiagID: diag::warn_return_typestate_mismatch) << ExpectedState << ObservedState);
2393
2394 Warnings.emplace_back(args: std::move(Warning), args: OptionalNotes());
2395 }
2396
2397 void warnUseOfTempInInvalidState(StringRef MethodName, StringRef State,
2398 SourceLocation Loc) override {
2399
2400 PartialDiagnosticAt Warning(Loc, S.PDiag(
2401 DiagID: diag::warn_use_of_temp_in_invalid_state) << MethodName << State);
2402
2403 Warnings.emplace_back(args: std::move(Warning), args: OptionalNotes());
2404 }
2405
2406 void warnUseInInvalidState(StringRef MethodName, StringRef VariableName,
2407 StringRef State, SourceLocation Loc) override {
2408
2409 PartialDiagnosticAt Warning(Loc, S.PDiag(DiagID: diag::warn_use_in_invalid_state) <<
2410 MethodName << VariableName << State);
2411
2412 Warnings.emplace_back(args: std::move(Warning), args: OptionalNotes());
2413 }
2414};
2415} // anonymous namespace
2416} // namespace consumed
2417} // namespace clang
2418
2419//===----------------------------------------------------------------------===//
2420// Unsafe buffer usage analysis.
2421//===----------------------------------------------------------------------===//
2422
2423namespace {
2424class UnsafeBufferUsageReporter : public UnsafeBufferUsageHandler {
2425 Sema &S;
2426 bool SuggestSuggestions; // Recommend -fsafe-buffer-usage-suggestions?
2427
2428 // Lists as a string the names of variables in `VarGroupForVD` except for `VD`
2429 // itself:
2430 std::string listVariableGroupAsString(
2431 const VarDecl *VD, const ArrayRef<const VarDecl *> &VarGroupForVD) const {
2432 if (VarGroupForVD.size() <= 1)
2433 return "";
2434
2435 std::vector<StringRef> VarNames;
2436 auto PutInQuotes = [](StringRef S) -> std::string {
2437 return "'" + S.str() + "'";
2438 };
2439
2440 for (auto *V : VarGroupForVD) {
2441 if (V == VD)
2442 continue;
2443 VarNames.push_back(x: V->getName());
2444 }
2445 if (VarNames.size() == 1) {
2446 return PutInQuotes(VarNames[0]);
2447 }
2448 if (VarNames.size() == 2) {
2449 return PutInQuotes(VarNames[0]) + " and " + PutInQuotes(VarNames[1]);
2450 }
2451 assert(VarGroupForVD.size() > 3);
2452 const unsigned N = VarNames.size() -
2453 2; // need to print the last two names as "..., X, and Y"
2454 std::string AllVars = "";
2455
2456 for (unsigned I = 0; I < N; ++I)
2457 AllVars.append(str: PutInQuotes(VarNames[I]) + ", ");
2458 AllVars.append(str: PutInQuotes(VarNames[N]) + ", and " +
2459 PutInQuotes(VarNames[N + 1]));
2460 return AllVars;
2461 }
2462
2463public:
2464 UnsafeBufferUsageReporter(Sema &S, bool SuggestSuggestions)
2465 : S(S), SuggestSuggestions(SuggestSuggestions) {}
2466
2467 void handleUnsafeOperation(const Stmt *Operation, bool IsRelatedToDecl,
2468 ASTContext &Ctx) override {
2469 SourceLocation Loc;
2470 SourceRange Range;
2471 unsigned MsgParam = 0;
2472 NamedDecl *D = nullptr;
2473 if (const auto *ASE = dyn_cast<ArraySubscriptExpr>(Val: Operation)) {
2474 Loc = ASE->getBase()->getExprLoc();
2475 Range = ASE->getBase()->getSourceRange();
2476 MsgParam = 2;
2477 } else if (const auto *BO = dyn_cast<BinaryOperator>(Val: Operation)) {
2478 BinaryOperator::Opcode Op = BO->getOpcode();
2479 if (Op == BO_Add || Op == BO_AddAssign || Op == BO_Sub ||
2480 Op == BO_SubAssign) {
2481 if (BO->getRHS()->getType()->isIntegerType()) {
2482 Loc = BO->getLHS()->getExprLoc();
2483 Range = BO->getLHS()->getSourceRange();
2484 } else {
2485 Loc = BO->getRHS()->getExprLoc();
2486 Range = BO->getRHS()->getSourceRange();
2487 }
2488 MsgParam = 1;
2489 }
2490 } else if (const auto *UO = dyn_cast<UnaryOperator>(Val: Operation)) {
2491 UnaryOperator::Opcode Op = UO->getOpcode();
2492 if (Op == UO_PreInc || Op == UO_PreDec || Op == UO_PostInc ||
2493 Op == UO_PostDec) {
2494 Loc = UO->getSubExpr()->getExprLoc();
2495 Range = UO->getSubExpr()->getSourceRange();
2496 MsgParam = 1;
2497 }
2498 } else {
2499 if (isa<CallExpr>(Val: Operation) || isa<CXXConstructExpr>(Val: Operation)) {
2500 // note_unsafe_buffer_operation doesn't have this mode yet.
2501 assert(!IsRelatedToDecl && "Not implemented yet!");
2502 MsgParam = 3;
2503 } else if (isa<MemberExpr>(Val: Operation)) {
2504 // note_unsafe_buffer_operation doesn't have this mode yet.
2505 assert(!IsRelatedToDecl && "Not implemented yet!");
2506 auto *ME = cast<MemberExpr>(Val: Operation);
2507 D = ME->getMemberDecl();
2508 MsgParam = 5;
2509 } else if (const auto *ECE = dyn_cast<ExplicitCastExpr>(Val: Operation)) {
2510 QualType destType = ECE->getType();
2511 bool destTypeComplete = true;
2512
2513 if (!isa<PointerType>(Val: destType))
2514 return;
2515 destType = destType.getTypePtr()->getPointeeType();
2516 if (const auto *D = destType->getAsTagDecl())
2517 destTypeComplete = D->isCompleteDefinition();
2518
2519 // If destination type is incomplete, it is unsafe to cast to anyway, no
2520 // need to check its type:
2521 if (destTypeComplete) {
2522 const uint64_t dSize = Ctx.getTypeSize(T: destType);
2523 QualType srcType = ECE->getSubExpr()->getType();
2524
2525 assert(srcType->isPointerType());
2526
2527 const uint64_t sSize =
2528 Ctx.getTypeSize(T: srcType.getTypePtr()->getPointeeType());
2529
2530 if (sSize >= dSize)
2531 return;
2532 }
2533 if (const auto *CE = dyn_cast<CXXMemberCallExpr>(
2534 Val: ECE->getSubExpr()->IgnoreParens())) {
2535 D = CE->getMethodDecl();
2536 }
2537
2538 if (!D)
2539 return;
2540
2541 MsgParam = 4;
2542 }
2543 Loc = Operation->getBeginLoc();
2544 Range = Operation->getSourceRange();
2545 }
2546 if (IsRelatedToDecl) {
2547 assert(!SuggestSuggestions &&
2548 "Variables blamed for unsafe buffer usage without suggestions!");
2549 S.Diag(Loc, DiagID: diag::note_unsafe_buffer_operation) << MsgParam << Range;
2550 } else {
2551 if (D) {
2552 S.Diag(Loc, DiagID: diag::warn_unsafe_buffer_operation)
2553 << MsgParam << D << Range;
2554 } else {
2555 S.Diag(Loc, DiagID: diag::warn_unsafe_buffer_operation) << MsgParam << Range;
2556 }
2557 if (SuggestSuggestions) {
2558 S.Diag(Loc, DiagID: diag::note_safe_buffer_usage_suggestions_disabled);
2559 }
2560 }
2561 }
2562
2563 void handleUnsafeLibcCall(const CallExpr *Call, unsigned PrintfInfo,
2564 ASTContext &Ctx,
2565 const Expr *UnsafeArg = nullptr) override {
2566 unsigned DiagID = diag::warn_unsafe_buffer_libc_call;
2567 if (PrintfInfo & 0x8) {
2568 // The callee is a function with the format attribute. See the
2569 // documentation of PrintfInfo in UnsafeBufferUsageHandler, and
2570 // UnsafeLibcFunctionCallGadget::UnsafeKind.
2571 DiagID = diag::warn_unsafe_buffer_format_attr_call;
2572 PrintfInfo ^= 0x8;
2573 }
2574 S.Diag(Loc: Call->getBeginLoc(), DiagID)
2575 << Call->getDirectCallee() // We've checked there is a direct callee
2576 << Call->getSourceRange();
2577 if (PrintfInfo > 0) {
2578 SourceRange R =
2579 UnsafeArg ? UnsafeArg->getSourceRange() : Call->getSourceRange();
2580 S.Diag(Loc: R.getBegin(), DiagID: diag::note_unsafe_buffer_printf_call)
2581 << PrintfInfo << R;
2582 }
2583 }
2584
2585 void handleUnsafeOperationInContainer(const Stmt *Operation,
2586 bool IsRelatedToDecl,
2587 ASTContext &Ctx) override {
2588 SourceLocation Loc;
2589 SourceRange Range;
2590 unsigned MsgParam = 0;
2591
2592 // This function only handles SpanTwoParamConstructorGadget so far, which
2593 // always gives a CXXConstructExpr.
2594 const auto *CtorExpr = cast<CXXConstructExpr>(Val: Operation);
2595 Loc = CtorExpr->getLocation();
2596
2597 S.Diag(Loc, DiagID: diag::warn_unsafe_buffer_usage_in_container);
2598 if (IsRelatedToDecl) {
2599 assert(!SuggestSuggestions &&
2600 "Variables blamed for unsafe buffer usage without suggestions!");
2601 S.Diag(Loc, DiagID: diag::note_unsafe_buffer_operation) << MsgParam << Range;
2602 }
2603 }
2604
2605 void handleUnsafeVariableGroup(const VarDecl *Variable,
2606 const VariableGroupsManager &VarGrpMgr,
2607 FixItList &&Fixes, const Decl *D,
2608 const FixitStrategy &VarTargetTypes) override {
2609 assert(!SuggestSuggestions &&
2610 "Unsafe buffer usage fixits displayed without suggestions!");
2611 S.Diag(Loc: Variable->getLocation(), DiagID: diag::warn_unsafe_buffer_variable)
2612 << Variable << (Variable->getType()->isPointerType() ? 0 : 1)
2613 << Variable->getSourceRange();
2614 if (!Fixes.empty()) {
2615 assert(isa<NamedDecl>(D) &&
2616 "Fix-its are generated only for `NamedDecl`s");
2617 const NamedDecl *ND = cast<NamedDecl>(Val: D);
2618 bool BriefMsg = false;
2619 // If the variable group involves parameters, the diagnostic message will
2620 // NOT explain how the variables are grouped as the reason is non-trivial
2621 // and irrelavant to users' experience:
2622 const auto VarGroupForVD = VarGrpMgr.getGroupOfVar(Var: Variable, HasParm: &BriefMsg);
2623 unsigned FixItStrategy = 0;
2624 switch (VarTargetTypes.lookup(VD: Variable)) {
2625 case clang::FixitStrategy::Kind::Span:
2626 FixItStrategy = 0;
2627 break;
2628 case clang::FixitStrategy::Kind::Array:
2629 FixItStrategy = 1;
2630 break;
2631 default:
2632 assert(false && "We support only std::span and std::array");
2633 };
2634
2635 const auto &FD =
2636 S.Diag(Loc: Variable->getLocation(),
2637 DiagID: BriefMsg ? diag::note_unsafe_buffer_variable_fixit_together
2638 : diag::note_unsafe_buffer_variable_fixit_group);
2639
2640 FD << Variable << FixItStrategy;
2641 FD << listVariableGroupAsString(VD: Variable, VarGroupForVD)
2642 << (VarGroupForVD.size() > 1) << ND;
2643 for (const auto &F : Fixes) {
2644 FD << F;
2645 }
2646 }
2647
2648#ifndef NDEBUG
2649 if (areDebugNotesRequested())
2650 for (const DebugNote &Note: DebugNotesByVar[Variable])
2651 S.Diag(Note.first, diag::note_safe_buffer_debug_mode) << Note.second;
2652#endif
2653 }
2654
2655 void handleUnsafeUniquePtrArrayAccess(const DynTypedNode &Node,
2656 bool IsRelatedToDecl,
2657 ASTContext &Ctx) override {
2658 SourceLocation Loc;
2659
2660 Loc = Node.get<Stmt>()->getBeginLoc();
2661 S.Diag(Loc, DiagID: diag::warn_unsafe_buffer_usage_unique_ptr_array_access)
2662 << Node.getSourceRange();
2663 }
2664
  // True if `Loc` lies in a region the preprocessor knows as opted out of
  // safe-buffer checking (per `S.PP.isSafeBufferOptOut`).
  bool isSafeBufferOptOut(const SourceLocation &Loc) const override {
    return S.PP.isSafeBufferOptOut(SourceMgr: S.getSourceManager(), Loc);
  }
2668
  // True if the "unsafe buffer usage in container" diagnostic is disabled at
  // `Loc`.
  bool ignoreUnsafeBufferInContainer(const SourceLocation &Loc) const override {
    return S.Diags.isIgnored(DiagID: diag::warn_unsafe_buffer_usage_in_container, Loc);
  }
2672
  // True if the "unsafe libc call" diagnostic is disabled at `Loc`.
  bool ignoreUnsafeBufferInLibcCall(const SourceLocation &Loc) const override {
    return S.Diags.isIgnored(DiagID: diag::warn_unsafe_buffer_libc_call, Loc);
  }
2676
  // True if the "unsafe buffer usage in static-sized array" diagnostic is
  // disabled at `Loc`.
  bool ignoreUnsafeBufferInStaticSizedArray(
      const SourceLocation &Loc) const override {
    return S.Diags.isIgnored(
        DiagID: diag::warn_unsafe_buffer_usage_in_static_sized_array, Loc);
  }
2682
  // Returns the text representation of clang::unsafe_buffer_usage attribute.
  // `WSSuffix` holds customized "white-space"s, e.g., newline or whitespace
  // characters.
  std::string
  getUnsafeBufferUsageAttributeTextAt(SourceLocation Loc,
                                      StringRef WSSuffix = "") const override {
    Preprocessor &PP = S.getPreprocessor();
    // Token sequence spelling `[[clang::unsafe_buffer_usage]]`, used to look
    // for a user macro with the same expansion:
    TokenValue ClangUnsafeBufferUsageTokens[] = {
        tok::l_square,
        tok::l_square,
        PP.getIdentifierInfo(Name: "clang"),
        tok::coloncolon,
        PP.getIdentifierInfo(Name: "unsafe_buffer_usage"),
        tok::r_square,
        tok::r_square};

    StringRef MacroName;

    // The macro returned by `getLastMacroWithSpelling` is guaranteed not to
    // be function-like:
    MacroName = PP.getLastMacroWithSpelling(Loc, Tokens: ClangUnsafeBufferUsageTokens);
    // Fall back to the raw attribute spelling if no such macro is in scope.
    if (MacroName.empty())
      MacroName = "[[clang::unsafe_buffer_usage]]";
    return MacroName.str() + WSSuffix.str();
  }
2707};
2708} // namespace
2709
2710//===----------------------------------------------------------------------===//
2711// AnalysisBasedWarnings - Worker object used by Sema to execute analysis-based
2712// warnings on a function, method, or block.
2713//===----------------------------------------------------------------------===//
2714
2715sema::AnalysisBasedWarnings::Policy::Policy() {
2716 enableCheckFallThrough = 1;
2717 enableCheckUnreachable = 0;
2718 enableThreadSafetyAnalysis = 0;
2719 enableConsumedAnalysis = 0;
2720}
2721
/// InterProceduralData aims to be a storage of whatever data should be passed
/// between analyses of different functions.
///
/// At the moment, its primary goal is to make the information gathered during
/// the analysis of the blocks available during the analysis of the enclosing
/// function. This is important due to the fact that blocks are analyzed before
/// the enclosed function is even parsed fully, so it is not viable to access
/// anything in the outer scope while analyzing the block. On the other hand,
/// re-building CFG for blocks and re-analyzing them when we do have all the
/// information (i.e. during the analysis of the enclosing function) seems to be
/// ill-designed.
class sema::AnalysisBasedWarnings::InterProceduralData {
public:
  // It is important to analyze blocks within functions because it's a very
  // common pattern to capture completion handler parameters by blocks.
  // State shared across the called-once analyses of blocks and functions.
  CalledOnceInterProceduralData CalledOnceData;
};
2739
2740template <typename... Ts>
2741static bool areAnyEnabled(DiagnosticsEngine &D, SourceLocation Loc,
2742 Ts... Diags) {
2743 return (!D.isIgnored(DiagID: Diags, Loc) || ...);
2744}
2745
// All statistics counters start at zero; they are only updated when
// S.CollectStats is set and are reported by PrintStats().
sema::AnalysisBasedWarnings::AnalysisBasedWarnings(Sema &s)
    : S(s), IPData(std::make_unique<InterProceduralData>()),
      NumFunctionsAnalyzed(0), NumFunctionsWithBadCFGs(0), NumCFGBlocks(0),
      MaxCFGBlocksPerFunction(0), NumUninitAnalysisFunctions(0),
      NumUninitAnalysisVariables(0), MaxUninitAnalysisVariablesPerFunction(0),
      NumUninitAnalysisBlockVisits(0),
      MaxUninitAnalysisBlockVisitsPerFunction(0) {
}
2754
// We need this here for unique_ptr with forward declared class.
// (InterProceduralData is complete only in this translation unit.)
sema::AnalysisBasedWarnings::~AnalysisBasedWarnings() = default;
2757
2758sema::AnalysisBasedWarnings::Policy
2759sema::AnalysisBasedWarnings::getPolicyInEffectAt(SourceLocation Loc) {
2760 using namespace diag;
2761 DiagnosticsEngine &D = S.getDiagnostics();
2762 Policy P;
2763
2764 // Note: The enabled checks should be kept in sync with the switch in
2765 // SemaPPCallbacks::PragmaDiagnostic().
2766 P.enableCheckUnreachable =
2767 PolicyOverrides.enableCheckUnreachable ||
2768 areAnyEnabled(D, Loc, Diags: warn_unreachable, Diags: warn_unreachable_break,
2769 Diags: warn_unreachable_return, Diags: warn_unreachable_loop_increment);
2770
2771 P.enableThreadSafetyAnalysis = PolicyOverrides.enableThreadSafetyAnalysis ||
2772 areAnyEnabled(D, Loc, Diags: warn_double_lock);
2773
2774 P.enableConsumedAnalysis = PolicyOverrides.enableConsumedAnalysis ||
2775 areAnyEnabled(D, Loc, Diags: warn_use_in_invalid_state);
2776 return P;
2777}
2778
2779void sema::AnalysisBasedWarnings::clearOverrides() {
2780 PolicyOverrides.enableCheckUnreachable = false;
2781 PolicyOverrides.enableConsumedAnalysis = false;
2782 PolicyOverrides.enableThreadSafetyAnalysis = false;
2783}
2784
2785static void flushDiagnostics(Sema &S, const sema::FunctionScopeInfo *fscope) {
2786 for (const auto &D : fscope->PossiblyUnreachableDiags)
2787 S.Diag(Loc: D.Loc, PD: D.PD);
2788}
2789
/// Emit the delayed "possibly unreachable" diagnostics in [PUDs.first,
/// PUDs.second), suppressing any whose statements are provably unreachable
/// from the CFG entry.
template <typename Iterator>
static void emitPossiblyUnreachableDiags(Sema &S, AnalysisDeclContext &AC,
                                         std::pair<Iterator, Iterator> PUDs) {

  if (PUDs.first == PUDs.second)
    return;

  // Force each diagnostic's statements to get their own CFG elements so their
  // reachability can be queried below.
  for (auto I = PUDs.first; I != PUDs.second; ++I) {
    for (const Stmt *S : I->Stmts)
      AC.registerForcedBlockExpression(stmt: S);
  }

  if (AC.getCFG()) {
    CFGReverseBlockReachabilityAnalysis *Analysis =
        AC.getCFGReachablityAnalysis();

    for (auto I = PUDs.first; I != PUDs.second; ++I) {
      const auto &D = *I;
      // Emit only if no associated statement was proven unreachable; a
      // statement with no block or no analysis counts as reachable.
      if (llvm::all_of(D.Stmts, [&](const Stmt *St) {
            const CFGBlock *Block = AC.getBlockForRegisteredExpression(stmt: St);
            // FIXME: We should be able to assert that block is non-null, but
            // the CFG analysis can skip potentially-evaluated expressions in
            // edge cases; see test/Sema/vla-2.c.
            if (Block && Analysis)
              if (!Analysis->isReachable(Src: &AC.getCFG()->getEntry(), Dst: Block))
                return false;
            return true;
          })) {
        S.Diag(D.Loc, D.PD);
      }
    }
  } else {
    // No CFG could be built: emit everything unconditionally.
    for (auto I = PUDs.first; I != PUDs.second; ++I)
      S.Diag(I->Loc, I->PD);
  }
}
2826
2827void sema::AnalysisBasedWarnings::registerVarDeclWarning(
2828 VarDecl *VD, clang::sema::PossiblyUnreachableDiag PUD) {
2829 VarDeclPossiblyUnreachableDiags.emplace(args&: VD, args&: PUD);
2830}
2831
// Emit all diagnostics previously queued for VD via registerVarDeclWarning,
// filtering out those whose statements are unreachable.
void sema::AnalysisBasedWarnings::issueWarningsForRegisteredVarDecl(
    VarDecl *VD) {
  // Nothing registered for this variable; avoid building a CFG for nothing.
  if (!llvm::is_contained(Range&: VarDeclPossiblyUnreachableDiags, Element: VD))
    return;

  AnalysisDeclContext AC(/*Mgr=*/nullptr, VD);

  // CFG build options for the variable's context; these mirror the options
  // used for function bodies in IssueWarnings (minus parameter lifetimes).
  AC.getCFGBuildOptions().PruneTriviallyFalseEdges = true;
  AC.getCFGBuildOptions().AddEHEdges = false;
  AC.getCFGBuildOptions().AddInitializers = true;
  AC.getCFGBuildOptions().AddImplicitDtors = true;
  AC.getCFGBuildOptions().AddTemporaryDtors = true;
  AC.getCFGBuildOptions().AddCXXNewAllocator = false;
  AC.getCFGBuildOptions().AddCXXDefaultInitExprInCtors = true;

  // Collect just the diagnostics (mapped values) registered for VD.
  auto Range = VarDeclPossiblyUnreachableDiags.equal_range(x: VD);
  auto SecondRange =
      llvm::make_second_range(c: llvm::make_range(x: Range.first, y: Range.second));
  emitPossiblyUnreachableDiags(
      S, AC, PUDs: std::make_pair(x: SecondRange.begin(), y: SecondRange.end()));
}
2853
2854// An AST Visitor that calls a callback function on each callable DEFINITION
2855// that is NOT in a dependent context:
2856class CallableVisitor : public DynamicRecursiveASTVisitor {
2857private:
2858 llvm::function_ref<void(const Decl *)> Callback;
2859 const Module *const TUModule;
2860
2861public:
2862 CallableVisitor(llvm::function_ref<void(const Decl *)> Callback,
2863 const Module *const TUModule)
2864 : Callback(Callback), TUModule(TUModule) {
2865 ShouldVisitTemplateInstantiations = true;
2866 ShouldVisitImplicitCode = false;
2867 }
2868
2869 bool TraverseDecl(Decl *Node) override {
2870 // For performance reasons, only validate the current translation unit's
2871 // module, and not modules it depends on.
2872 // See https://issues.chromium.org/issues/351909443 for details.
2873 if (Node && Node->getOwningModule() == TUModule)
2874 return DynamicRecursiveASTVisitor::TraverseDecl(D: Node);
2875 return true;
2876 }
2877
2878 bool VisitFunctionDecl(FunctionDecl *Node) override {
2879 if (cast<DeclContext>(Val: Node)->isDependentContext())
2880 return true; // Not to analyze dependent decl
2881 // `FunctionDecl->hasBody()` returns true if the function has a body
2882 // somewhere defined. But we want to know if this `Node` has a body
2883 // child. So we use `doesThisDeclarationHaveABody`:
2884 if (Node->doesThisDeclarationHaveABody())
2885 Callback(Node);
2886 return true;
2887 }
2888
2889 bool VisitBlockDecl(BlockDecl *Node) override {
2890 if (cast<DeclContext>(Val: Node)->isDependentContext())
2891 return true; // Not to analyze dependent decl
2892 Callback(Node);
2893 return true;
2894 }
2895
2896 bool VisitObjCMethodDecl(ObjCMethodDecl *Node) override {
2897 if (cast<DeclContext>(Val: Node)->isDependentContext())
2898 return true; // Not to analyze dependent decl
2899 if (Node->hasBody())
2900 Callback(Node);
2901 return true;
2902 }
2903
2904 bool VisitLambdaExpr(LambdaExpr *Node) override {
2905 return VisitFunctionDecl(Node: Node->getCallOperator());
2906 }
2907};
2908
2909static void
2910LifetimeSafetyTUAnalysis(Sema &S, TranslationUnitDecl *TU,
2911 clang::lifetimes::LifetimeSafetyStats &LSStats) {
2912 llvm::TimeTraceScope TimeProfile("LifetimeSafetyTUAnalysis");
2913 CallGraph CG;
2914 CG.addToCallGraph(D: TU);
2915 lifetimes::LifetimeSafetySemaHelperImpl SemaHelper(S);
2916 for (auto *Node : llvm::post_order(G: &CG)) {
2917 const clang::FunctionDecl *CanonicalFD =
2918 dyn_cast_or_null<clang::FunctionDecl>(Val: Node->getDecl());
2919 if (!CanonicalFD)
2920 continue;
2921 const FunctionDecl *FD = CanonicalFD->getDefinition();
2922 if (!FD)
2923 continue;
2924 AnalysisDeclContext AC(nullptr, FD);
2925 AC.getCFGBuildOptions().PruneTriviallyFalseEdges = false;
2926 AC.getCFGBuildOptions().AddLifetime = true;
2927 AC.getCFGBuildOptions().AddParameterLifetimes = true;
2928 AC.getCFGBuildOptions().setAllAlwaysAdd();
2929 if (AC.getCFG())
2930 runLifetimeSafetyAnalysis(AC, SemaHelper: &SemaHelper, Stats&: LSStats, CollectStats: S.CollectStats);
2931 }
2932}
2933
// Issue the whole-TU analysis-based warnings: per-declaration unsafe buffer
// usage checks and (optionally) the TU-wide lifetime safety analysis.
void clang::sema::AnalysisBasedWarnings::IssueWarnings(
    TranslationUnitDecl *TU) {
  if (!TU)
    return; // This is unexpected, give up quietly.

  DiagnosticsEngine &Diags = S.getDiagnostics();

  if (S.hasUncompilableErrorOccurred() || Diags.getIgnoreAllWarnings())
    // exit if having uncompilable errors or ignoring all warnings:
    return;

  DiagnosticOptions &DiagOpts = Diags.getDiagnosticOptions();

  // UnsafeBufferUsage analysis settings.
  // Fix-it suggestions are only possible in C++20 or newer.
  bool UnsafeBufferUsageCanEmitSuggestions = S.getLangOpts().CPlusPlus20;
  bool UnsafeBufferUsageShouldEmitSuggestions = // Should != Can.
      UnsafeBufferUsageCanEmitSuggestions &&
      DiagOpts.ShowSafeBufferUsageSuggestions;
  bool UnsafeBufferUsageShouldSuggestSuggestions =
      UnsafeBufferUsageCanEmitSuggestions &&
      !DiagOpts.ShowSafeBufferUsageSuggestions;
  UnsafeBufferUsageReporter R(S, UnsafeBufferUsageShouldSuggestSuggestions);

  // The Callback function that performs analyses:
  auto CallAnalyzers = [&](const Decl *Node) -> void {
    // Declarations marked [[clang::unsafe_buffer_usage]] are exempt.
    if (Node->hasAttr<UnsafeBufferUsageAttr>())
      return;

    // Perform unsafe buffer usage analysis:
    if (!Diags.isIgnored(DiagID: diag::warn_unsafe_buffer_operation,
                         Loc: Node->getBeginLoc()) ||
        !Diags.isIgnored(DiagID: diag::warn_unsafe_buffer_variable,
                         Loc: Node->getBeginLoc()) ||
        !Diags.isIgnored(DiagID: diag::warn_unsafe_buffer_usage_in_container,
                         Loc: Node->getBeginLoc()) ||
        !Diags.isIgnored(DiagID: diag::warn_unsafe_buffer_libc_call,
                         Loc: Node->getBeginLoc())) {
      clang::checkUnsafeBufferUsage(D: Node, Handler&: R,
                                    EmitSuggestions: UnsafeBufferUsageShouldEmitSuggestions);
    }

    // More analysis ...
  };
  // Emit per-function analysis-based warnings that require the whole-TU
  // reasoning. Check if any of them is enabled at all before scanning the AST:
  if (!Diags.isIgnored(DiagID: diag::warn_unsafe_buffer_operation, Loc: SourceLocation()) ||
      !Diags.isIgnored(DiagID: diag::warn_unsafe_buffer_variable, Loc: SourceLocation()) ||
      !Diags.isIgnored(DiagID: diag::warn_unsafe_buffer_usage_in_container,
                       Loc: SourceLocation()) ||
      (!Diags.isIgnored(DiagID: diag::warn_unsafe_buffer_libc_call, Loc: SourceLocation()) &&
       S.getLangOpts().CPlusPlus /* only warn about libc calls in C++ */)) {
    CallableVisitor(CallAnalyzers, TU->getOwningModule())
        .TraverseTranslationUnitDecl(D: TU);
  }

  // TU-wide lifetime analysis; when disabled, the per-function variant runs
  // from the other IssueWarnings overload instead.
  if (S.getLangOpts().EnableLifetimeSafety && S.getLangOpts().CPlusPlus &&
      S.getLangOpts().EnableLifetimeSafetyTUAnalysis)
    LifetimeSafetyTUAnalysis(S, TU, LSStats);
}
2993
/// Run the per-function analysis-based warnings on D's body according to
/// policy P: fall-through, unreachable code, thread safety, consumed objects,
/// uninitialized variables, lifetime safety, called-once parameters, switch
/// fallthrough, repeated weak-object use, infinite recursion, and
/// throw-in-noexcept.
void clang::sema::AnalysisBasedWarnings::IssueWarnings(
    sema::AnalysisBasedWarnings::Policy P, sema::FunctionScopeInfo *fscope,
    const Decl *D, QualType BlockType) {

  // We avoid doing analysis-based warnings when there are errors for
  // two reasons:
  // (1) The CFGs often can't be constructed (if the body is invalid), so
  //     don't bother trying.
  // (2) The code already has problems; running the analysis just takes more
  //     time.
  DiagnosticsEngine &Diags = S.getDiagnostics();

  // Do not do any analysis if we are going to just ignore them.
  if (Diags.getIgnoreAllWarnings() ||
      (Diags.getSuppressSystemWarnings() &&
       S.SourceMgr.isInSystemHeader(Loc: D->getLocation())))
    return;

  // For code in dependent contexts, we'll do this at instantiation time.
  if (cast<DeclContext>(Val: D)->isDependentContext())
    return;

  if (S.hasUncompilableErrorOccurred()) {
    // Flush out any possibly unreachable diagnostics.
    flushDiagnostics(S, fscope);
    return;
  }

  const Stmt *Body = D->getBody();
  assert(Body);

  // Construct the analysis context with the specified CFG build options.
  AnalysisDeclContext AC(/* AnalysisDeclContextManager */ nullptr, D);

  // Don't generate EH edges for CallExprs as we'd like to avoid the n^2
  // explosion for destructors that can result and the compile time hit.
  AC.getCFGBuildOptions().PruneTriviallyFalseEdges = true;
  AC.getCFGBuildOptions().AddEHEdges = false;
  AC.getCFGBuildOptions().AddInitializers = true;
  AC.getCFGBuildOptions().AddImplicitDtors = true;
  AC.getCFGBuildOptions().AddParameterLifetimes = true;
  AC.getCFGBuildOptions().AddTemporaryDtors = true;
  AC.getCFGBuildOptions().AddCXXNewAllocator = false;
  AC.getCFGBuildOptions().AddCXXDefaultInitExprInCtors = true;

  // The function-local lifetime analysis runs only when the TU-wide variant
  // (see the other IssueWarnings overload) is not enabled.
  bool EnableLifetimeSafetyAnalysis =
      S.getLangOpts().EnableLifetimeSafety &&
      !S.getLangOpts().EnableLifetimeSafetyTUAnalysis &&
      lifetimes::IsLifetimeSafetyDiagnosticEnabled(S, D);

  // Force that certain expressions appear as CFGElements in the CFG. This
  // is used to speed up various analyses.
  // FIXME: This isn't the right factoring. This is here for initial
  // prototyping, but we need a way for analyses to say what expressions they
  // expect to always be CFGElements and then fill in the BuildOptions
  // appropriately. This is essentially a layering violation.
  if (P.enableCheckUnreachable || P.enableThreadSafetyAnalysis ||
      P.enableConsumedAnalysis || EnableLifetimeSafetyAnalysis) {
    // Unreachable code analysis and thread safety require a linearized CFG.
    AC.getCFGBuildOptions().setAllAlwaysAdd();
  } else {
    AC.getCFGBuildOptions()
        .setAlwaysAdd(stmtClass: Stmt::BinaryOperatorClass)
        .setAlwaysAdd(stmtClass: Stmt::CompoundAssignOperatorClass)
        .setAlwaysAdd(stmtClass: Stmt::BlockExprClass)
        .setAlwaysAdd(stmtClass: Stmt::CStyleCastExprClass)
        .setAlwaysAdd(stmtClass: Stmt::DeclRefExprClass)
        .setAlwaysAdd(stmtClass: Stmt::ImplicitCastExprClass)
        .setAlwaysAdd(stmtClass: Stmt::UnaryOperatorClass);
  }
  if (EnableLifetimeSafetyAnalysis)
    AC.getCFGBuildOptions().AddLifetime = true;

  // Install the logical handler.
  std::optional<LogicalErrorHandler> LEH;
  if (LogicalErrorHandler::hasActiveDiagnostics(Diags, Loc: D->getBeginLoc())) {
    LEH.emplace(args&: S);
    AC.getCFGBuildOptions().Observer = &*LEH;
  }

  // Emit delayed diagnostics.
  auto &PUDs = fscope->PossiblyUnreachableDiags;
  emitPossiblyUnreachableDiags(S, AC, PUDs: std::make_pair(x: PUDs.begin(), y: PUDs.end()));

  // Warning: check missing 'return'
  if (P.enableCheckFallThrough) {
    // Pick the diagnostics flavor matching the kind of callable: block,
    // lambda call operator, coroutine, or plain function.
    const CheckFallThroughDiagnostics &CD =
        (isa<BlockDecl>(Val: D) ? CheckFallThroughDiagnostics::MakeForBlock()
         : (isa<CXXMethodDecl>(Val: D) &&
            cast<CXXMethodDecl>(Val: D)->getOverloadedOperator() == OO_Call &&
            cast<CXXMethodDecl>(Val: D)->getParent()->isLambda())
             ? CheckFallThroughDiagnostics::MakeForLambda()
             : (fscope->isCoroutine()
                    ? CheckFallThroughDiagnostics::MakeForCoroutine(Func: D)
                    : CheckFallThroughDiagnostics::MakeForFunction(S, Func: D)));
    CheckFallThroughForBody(S, D, Body, BlockType, CD, AC);
  }

  // Warning: check for unreachable code
  if (P.enableCheckUnreachable) {
    // Only check for unreachable code on non-template instantiations.
    // Different template instantiations can effectively change the control-flow
    // and it is very difficult to prove that a snippet of code in a template
    // is unreachable for all instantiations.
    bool isTemplateInstantiation = false;
    if (const FunctionDecl *Function = dyn_cast<FunctionDecl>(Val: D))
      isTemplateInstantiation = Function->isTemplateInstantiation();
    if (!isTemplateInstantiation)
      CheckUnreachable(S, AC);
  }

  // Check for thread safety violations
  if (P.enableThreadSafetyAnalysis) {
    SourceLocation FL = AC.getDecl()->getLocation();
    SourceLocation FEL = AC.getDecl()->getEndLoc();
    threadSafety::ThreadSafetyReporter Reporter(S, FL, FEL);
    // Beta and verbose modes are controlled by their own diagnostic groups.
    if (!Diags.isIgnored(DiagID: diag::warn_thread_safety_beta, Loc: D->getBeginLoc()))
      Reporter.setIssueBetaWarnings(true);
    if (!Diags.isIgnored(DiagID: diag::warn_thread_safety_verbose, Loc: D->getBeginLoc()))
      Reporter.setVerbose(true);

    threadSafety::runThreadSafetyAnalysis(AC, Handler&: Reporter,
                                          Bset: &S.ThreadSafetyDeclCache);
    Reporter.emitDiagnostics();
  }

  // Check for violations of consumed properties.
  if (P.enableConsumedAnalysis) {
    consumed::ConsumedWarningsHandler WarningHandler(S);
    consumed::ConsumedAnalyzer Analyzer(WarningHandler);
    Analyzer.run(AC);
  }

  // Uninitialized-variable analysis, gated on any of its diagnostics being
  // enabled for this declaration.
  if (!Diags.isIgnored(DiagID: diag::warn_uninit_var, Loc: D->getBeginLoc()) ||
      !Diags.isIgnored(DiagID: diag::warn_sometimes_uninit_var, Loc: D->getBeginLoc()) ||
      !Diags.isIgnored(DiagID: diag::warn_maybe_uninit_var, Loc: D->getBeginLoc()) ||
      !Diags.isIgnored(DiagID: diag::warn_uninit_const_reference, Loc: D->getBeginLoc()) ||
      !Diags.isIgnored(DiagID: diag::warn_uninit_const_pointer, Loc: D->getBeginLoc())) {
    if (CFG *cfg = AC.getCFG()) {
      UninitValsDiagReporter reporter(S);
      UninitVariablesAnalysisStats stats;
      std::memset(s: &stats, c: 0, n: sizeof(UninitVariablesAnalysisStats));
      runUninitializedVariablesAnalysis(dc: *cast<DeclContext>(Val: D), cfg: *cfg, ac&: AC,
                                        handler&: reporter, stats);

      if (S.CollectStats && stats.NumVariablesAnalyzed > 0) {
        ++NumUninitAnalysisFunctions;
        NumUninitAnalysisVariables += stats.NumVariablesAnalyzed;
        NumUninitAnalysisBlockVisits += stats.NumBlockVisits;
        MaxUninitAnalysisVariablesPerFunction =
            std::max(a: MaxUninitAnalysisVariablesPerFunction,
                     b: stats.NumVariablesAnalyzed);
        MaxUninitAnalysisBlockVisitsPerFunction =
            std::max(a: MaxUninitAnalysisBlockVisitsPerFunction,
                     b: stats.NumBlockVisits);
      }
    }
  }

  // TODO: Enable lifetime safety analysis for other languages once it is
  // stable.
  if (EnableLifetimeSafetyAnalysis && S.getLangOpts().CPlusPlus) {
    if (AC.getCFG()) {
      lifetimes::LifetimeSafetySemaHelperImpl LifetimeSafetySemaHelper(S);
      lifetimes::runLifetimeSafetyAnalysis(AC, SemaHelper: &LifetimeSafetySemaHelper,
                                           Stats&: LSStats, CollectStats: S.CollectStats);
    }
  }
  // Check for violations of "called once" parameter properties.
  if (S.getLangOpts().ObjC && !S.getLangOpts().CPlusPlus &&
      shouldAnalyzeCalledOnceParameters(Diags, At: D->getBeginLoc())) {
    if (AC.getCFG()) {
      CalledOnceCheckReporter Reporter(S, IPData->CalledOnceData);
      checkCalledOnceParameters(
          AC, Handler&: Reporter,
          CheckConventionalParameters: shouldAnalyzeCalledOnceConventions(Diags, At: D->getBeginLoc()));
    }
  }

  // Switch-fallthrough diagnostics; also run when the body contains a
  // [[fallthrough]] statement (fscope->HasFallthroughStmt).
  bool FallThroughDiagFull =
      !Diags.isIgnored(DiagID: diag::warn_unannotated_fallthrough, Loc: D->getBeginLoc());
  bool FallThroughDiagPerFunction = !Diags.isIgnored(
      DiagID: diag::warn_unannotated_fallthrough_per_function, Loc: D->getBeginLoc());
  if (FallThroughDiagFull || FallThroughDiagPerFunction ||
      fscope->HasFallthroughStmt) {
    DiagnoseSwitchLabelsFallthrough(S, AC, PerFunction: !FallThroughDiagFull);
  }

  if (S.getLangOpts().ObjCWeak &&
      !Diags.isIgnored(DiagID: diag::warn_arc_repeated_use_of_weak, Loc: D->getBeginLoc()))
    diagnoseRepeatedUseOfWeak(S, CurFn: fscope, D, PM: AC.getParentMap());


  // Check for infinite self-recursion in functions
  if (!Diags.isIgnored(DiagID: diag::warn_infinite_recursive_function,
                       Loc: D->getBeginLoc())) {
    if (const FunctionDecl *FD = dyn_cast<FunctionDecl>(Val: D)) {
      checkRecursiveFunction(S, FD, Body, AC);
    }
  }

  // Check for throw out of non-throwing function.
  if (!Diags.isIgnored(DiagID: diag::warn_throw_in_noexcept_func, Loc: D->getBeginLoc()))
    if (const FunctionDecl *FD = dyn_cast<FunctionDecl>(Val: D))
      if (S.getLangOpts().CPlusPlus && !fscope->isCoroutine() && isNoexcept(FD))
        checkThrowInNonThrowingFunc(S, FD, AC);

  // If none of the previous checks caused a CFG build, trigger one here
  // for the logical error handler.
  if (LogicalErrorHandler::hasActiveDiagnostics(Diags, Loc: D->getBeginLoc())) {
    AC.getCFG();
  }

  // Clear any of our policy overrides.
  clearOverrides();

  // Collect statistics about the CFG if it was built.
  if (S.CollectStats && AC.isCFGBuilt()) {
    ++NumFunctionsAnalyzed;
    if (CFG *cfg = AC.getCFG()) {
      // If we successfully built a CFG for this context, record some more
      // detail information about it.
      NumCFGBlocks += cfg->getNumBlockIDs();
      MaxCFGBlocksPerFunction = std::max(a: MaxCFGBlocksPerFunction,
                                         b: cfg->getNumBlockIDs());
    } else {
      ++NumFunctionsWithBadCFGs;
    }
  }
}
3224
3225void clang::sema::AnalysisBasedWarnings::PrintStats() const {
3226 llvm::errs() << "\n*** Analysis Based Warnings Stats:\n";
3227
3228 unsigned NumCFGsBuilt = NumFunctionsAnalyzed - NumFunctionsWithBadCFGs;
3229 unsigned AvgCFGBlocksPerFunction =
3230 !NumCFGsBuilt ? 0 : NumCFGBlocks/NumCFGsBuilt;
3231 llvm::errs() << NumFunctionsAnalyzed << " functions analyzed ("
3232 << NumFunctionsWithBadCFGs << " w/o CFGs).\n"
3233 << " " << NumCFGBlocks << " CFG blocks built.\n"
3234 << " " << AvgCFGBlocksPerFunction
3235 << " average CFG blocks per function.\n"
3236 << " " << MaxCFGBlocksPerFunction
3237 << " max CFG blocks per function.\n";
3238
3239 unsigned AvgUninitVariablesPerFunction = !NumUninitAnalysisFunctions ? 0
3240 : NumUninitAnalysisVariables/NumUninitAnalysisFunctions;
3241 unsigned AvgUninitBlockVisitsPerFunction = !NumUninitAnalysisFunctions ? 0
3242 : NumUninitAnalysisBlockVisits/NumUninitAnalysisFunctions;
3243 llvm::errs() << NumUninitAnalysisFunctions
3244 << " functions analyzed for uninitialiazed variables\n"
3245 << " " << NumUninitAnalysisVariables << " variables analyzed.\n"
3246 << " " << AvgUninitVariablesPerFunction
3247 << " average variables per function.\n"
3248 << " " << MaxUninitAnalysisVariablesPerFunction
3249 << " max variables per function.\n"
3250 << " " << NumUninitAnalysisBlockVisits << " block visits.\n"
3251 << " " << AvgUninitBlockVisitsPerFunction
3252 << " average block visits per function.\n"
3253 << " " << MaxUninitAnalysisBlockVisitsPerFunction
3254 << " max block visits per function.\n";
3255 clang::lifetimes::printStats(Stats: LSStats);
3256}
3257