1//=== AnalysisBasedWarnings.cpp - Sema warnings based on libAnalysis ------===//
2//
3// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
4// See https://llvm.org/LICENSE.txt for license information.
5// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
6//
7//===----------------------------------------------------------------------===//
8//
9// This file defines analysis_warnings::[Policy,Executor].
10// Together they are used by Sema to issue warnings based on inexpensive
11// static analysis algorithms in libAnalysis.
12//
13//===----------------------------------------------------------------------===//
14
15#include "clang/Sema/AnalysisBasedWarnings.h"
16#include "TypeLocBuilder.h"
17#include "clang/AST/Decl.h"
18#include "clang/AST/DeclCXX.h"
19#include "clang/AST/DeclObjC.h"
20#include "clang/AST/DynamicRecursiveASTVisitor.h"
21#include "clang/AST/EvaluatedExprVisitor.h"
22#include "clang/AST/Expr.h"
23#include "clang/AST/ExprCXX.h"
24#include "clang/AST/ExprObjC.h"
25#include "clang/AST/OperationKinds.h"
26#include "clang/AST/ParentMap.h"
27#include "clang/AST/StmtCXX.h"
28#include "clang/AST/StmtObjC.h"
29#include "clang/AST/StmtVisitor.h"
30#include "clang/AST/Type.h"
31#include "clang/Analysis/Analyses/CFGReachabilityAnalysis.h"
32#include "clang/Analysis/Analyses/CalledOnceCheck.h"
33#include "clang/Analysis/Analyses/Consumed.h"
34#include "clang/Analysis/Analyses/LifetimeSafety/LifetimeAnnotations.h"
35#include "clang/Analysis/Analyses/LifetimeSafety/LifetimeSafety.h"
36#include "clang/Analysis/Analyses/ReachableCode.h"
37#include "clang/Analysis/Analyses/ThreadSafety.h"
38#include "clang/Analysis/Analyses/UninitializedValues.h"
39#include "clang/Analysis/Analyses/UnsafeBufferUsage.h"
40#include "clang/Analysis/AnalysisDeclContext.h"
41#include "clang/Analysis/CFG.h"
42#include "clang/Analysis/CallGraph.h"
43#include "clang/Analysis/FlowSensitive/DataflowWorklist.h"
44#include "clang/Basic/Diagnostic.h"
45#include "clang/Basic/DiagnosticSema.h"
46#include "clang/Basic/SourceLocation.h"
47#include "clang/Basic/SourceManager.h"
48#include "clang/Lex/Preprocessor.h"
49#include "clang/Sema/ScopeInfo.h"
50#include "clang/Sema/SemaInternal.h"
51#include "llvm/ADT/ArrayRef.h"
52#include "llvm/ADT/BitVector.h"
53#include "llvm/ADT/DenseMap.h"
54#include "llvm/ADT/MapVector.h"
55#include "llvm/ADT/PostOrderIterator.h"
56#include "llvm/ADT/STLFunctionalExtras.h"
57#include "llvm/ADT/SmallVector.h"
58#include "llvm/ADT/StringRef.h"
59#include "llvm/Support/Debug.h"
60#include "llvm/Support/TimeProfiler.h"
61#include <algorithm>
62#include <deque>
63#include <iterator>
64#include <optional>
65
66using namespace clang;
67
68//===----------------------------------------------------------------------===//
69// Unreachable code analysis.
70//===----------------------------------------------------------------------===//
71
72namespace {
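  /// Diagnostic callback for the reachable-code analysis: reports unreachable
  /// statements and suppresses duplicate reports that share the same
  /// silenceable condition value.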
73 class UnreachableCodeHandler : public reachable_code::Callback {
74 Sema &S;
75 SourceRange PreviousSilenceableCondVal;
76
77 public:
78 UnreachableCodeHandler(Sema &s) : S(s) {}
79
80 void HandleUnreachable(reachable_code::UnreachableKind UK, SourceLocation L,
81 SourceRange SilenceableCondVal, SourceRange R1,
82 SourceRange R2, bool HasFallThroughAttr) override {
      // If the diagnosed code is `[[fallthrough]];` and
      // `-Wunreachable-code-fallthrough` is enabled, suppress the `code will
      // never be executed` warning to avoid emitting the diagnostic twice.
86 if (HasFallThroughAttr &&
87 !S.getDiagnostics().isIgnored(DiagID: diag::warn_unreachable_fallthrough_attr,
88 Loc: SourceLocation()))
89 return;
90
91 // Avoid reporting multiple unreachable code diagnostics that are
92 // triggered by the same conditional value.
93 if (PreviousSilenceableCondVal.isValid() &&
94 SilenceableCondVal.isValid() &&
95 PreviousSilenceableCondVal == SilenceableCondVal)
96 return;
97 PreviousSilenceableCondVal = SilenceableCondVal;
98
99 unsigned diag = diag::warn_unreachable;
100 switch (UK) {
101 case reachable_code::UK_Break:
102 diag = diag::warn_unreachable_break;
103 break;
104 case reachable_code::UK_Return:
105 diag = diag::warn_unreachable_return;
106 break;
107 case reachable_code::UK_Loop_Increment:
108 diag = diag::warn_unreachable_loop_increment;
109 break;
110 case reachable_code::UK_Other:
111 break;
112 }
113
114 S.Diag(Loc: L, DiagID: diag) << R1 << R2;
115
116 SourceLocation Open = SilenceableCondVal.getBegin();
117 if (Open.isValid()) {
118 SourceLocation Close = SilenceableCondVal.getEnd();
119 Close = S.getLocForEndOfToken(Loc: Close);
120 if (Close.isValid()) {
121 S.Diag(Loc: Open, DiagID: diag::note_unreachable_silence)
122 << FixItHint::CreateInsertion(InsertionLoc: Open, Code: "/* DISABLES CODE */ (")
123 << FixItHint::CreateInsertion(InsertionLoc: Close, Code: ")");
124 }
125 }
126 }
127 };
128} // anonymous namespace
129
130/// CheckUnreachable - Check for unreachable code.
131static void CheckUnreachable(Sema &S, AnalysisDeclContext &AC) {
  // As a heuristic, prune all diagnostics not in the main file. Currently the
  // majority of warnings in headers are false positives. These are largely
  // caused by configuration state, e.g. preprocessor-defined code, etc.
136 //
137 // Note that this is also a performance optimization. Analyzing
138 // headers many times can be expensive.
139 if (!S.getSourceManager().isInMainFile(Loc: AC.getDecl()->getBeginLoc()))
140 return;
141
142 UnreachableCodeHandler UC(S);
143 reachable_code::FindUnreachableCode(AC, PP&: S.getPreprocessor(), CB&: UC);
144}
145
146namespace {
147/// Warn on logical operator errors in CFGBuilder
148class LogicalErrorHandler : public CFGCallback {
149 Sema &S;
150
151public:
152 LogicalErrorHandler(Sema &S) : S(S) {}
153
154 static bool HasMacroID(const Expr *E) {
155 if (E->getExprLoc().isMacroID())
156 return true;
157
158 // Recurse to children.
159 for (const Stmt *SubStmt : E->children())
160 if (const Expr *SubExpr = dyn_cast_or_null<Expr>(Val: SubStmt))
161 if (HasMacroID(E: SubExpr))
162 return true;
163
164 return false;
165 }
166
167 void logicAlwaysTrue(const BinaryOperator *B, bool isAlwaysTrue) override {
168 if (HasMacroID(E: B))
169 return;
170
171 unsigned DiagID = isAlwaysTrue
172 ? diag::warn_tautological_negation_or_compare
173 : diag::warn_tautological_negation_and_compare;
174 SourceRange DiagRange = B->getSourceRange();
175 S.Diag(Loc: B->getExprLoc(), DiagID) << DiagRange;
176 }
177
178 void compareAlwaysTrue(const BinaryOperator *B,
179 bool isAlwaysTrueOrFalse) override {
180 if (HasMacroID(E: B))
181 return;
182
183 SourceRange DiagRange = B->getSourceRange();
184 S.Diag(Loc: B->getExprLoc(), DiagID: diag::warn_tautological_overlap_comparison)
185 << DiagRange << isAlwaysTrueOrFalse;
186 }
187
188 void compareBitwiseEquality(const BinaryOperator *B,
189 bool isAlwaysTrue) override {
190 if (HasMacroID(E: B))
191 return;
192
193 SourceRange DiagRange = B->getSourceRange();
194 S.Diag(Loc: B->getExprLoc(), DiagID: diag::warn_comparison_bitwise_always)
195 << DiagRange << isAlwaysTrue;
196 }
197
198 void compareBitwiseOr(const BinaryOperator *B) override {
199 if (HasMacroID(E: B))
200 return;
201
202 SourceRange DiagRange = B->getSourceRange();
203 S.Diag(Loc: B->getExprLoc(), DiagID: diag::warn_comparison_bitwise_or) << DiagRange;
204 }
205
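  // Returns true if at least one of the diagnostics emitted by this callback
  // is enabled at the given location.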
206 static bool hasActiveDiagnostics(DiagnosticsEngine &Diags,
207 SourceLocation Loc) {
208 return !Diags.isIgnored(DiagID: diag::warn_tautological_overlap_comparison, Loc) ||
209 !Diags.isIgnored(DiagID: diag::warn_comparison_bitwise_or, Loc) ||
210 !Diags.isIgnored(DiagID: diag::warn_tautological_negation_and_compare, Loc);
211 }
212};
213} // anonymous namespace
214
215//===----------------------------------------------------------------------===//
216// Check for infinite self-recursion in functions
217//===----------------------------------------------------------------------===//
218
219// Returns true if the function is called anywhere within the CFGBlock.
// For member functions, the additional condition of being called through
// the 'this' pointer is required.
222static bool hasRecursiveCallInPath(const FunctionDecl *FD, CFGBlock &Block) {
223 // Process all the Stmt's in this block to find any calls to FD.
224 for (const auto &B : Block) {
225 if (B.getKind() != CFGElement::Statement)
226 continue;
227
228 const CallExpr *CE = dyn_cast<CallExpr>(Val: B.getAs<CFGStmt>()->getStmt());
229 if (!CE || !CE->getCalleeDecl() ||
230 CE->getCalleeDecl()->getCanonicalDecl() != FD)
231 continue;
232
233 // Skip function calls which are qualified with a templated class.
234 if (const DeclRefExpr *DRE =
235 dyn_cast<DeclRefExpr>(Val: CE->getCallee()->IgnoreParenImpCasts()))
236 if (NestedNameSpecifier NNS = DRE->getQualifier();
237 NNS.getKind() == NestedNameSpecifier::Kind::Type)
238 if (isa_and_nonnull<TemplateSpecializationType>(Val: NNS.getAsType()))
239 continue;
240
241 const CXXMemberCallExpr *MCE = dyn_cast<CXXMemberCallExpr>(Val: CE);
242 if (!MCE || isa<CXXThisExpr>(Val: MCE->getImplicitObjectArgument()) ||
243 !MCE->getMethodDecl()->isVirtual())
244 return true;
245 }
246 return false;
247}
248
249// Returns true if every path from the entry block passes through a call to FD.
250static bool checkForRecursiveFunctionCall(const FunctionDecl *FD, CFG *cfg) {
251 llvm::SmallPtrSet<CFGBlock *, 16> Visited;
252 llvm::SmallVector<CFGBlock *, 16> WorkList;
253 // Keep track of whether we found at least one recursive path.
254 bool foundRecursion = false;
255
256 const unsigned ExitID = cfg->getExit().getBlockID();
257
258 // Seed the work list with the entry block.
259 WorkList.push_back(Elt: &cfg->getEntry());
260
261 while (!WorkList.empty()) {
262 CFGBlock *Block = WorkList.pop_back_val();
263
264 for (auto I = Block->succ_begin(), E = Block->succ_end(); I != E; ++I) {
265 if (CFGBlock *SuccBlock = *I) {
266 if (!Visited.insert(Ptr: SuccBlock).second)
267 continue;
268
269 // Found a path to the exit node without a recursive call.
270 if (ExitID == SuccBlock->getBlockID())
271 return false;
272
273 // If the successor block contains a recursive call, end analysis there.
274 if (hasRecursiveCallInPath(FD, Block&: *SuccBlock)) {
275 foundRecursion = true;
276 continue;
277 }
278
279 WorkList.push_back(Elt: SuccBlock);
280 }
281 }
282 }
283 return foundRecursion;
284}
285
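// Check for infinite self-recursion: warn if every path from the entry block
// to the exit passes through a recursive call to the function itself.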
286static void checkRecursiveFunction(Sema &S, const FunctionDecl *FD,
287 const Stmt *Body, AnalysisDeclContext &AC) {
288 FD = FD->getCanonicalDecl();
289
290 // Only run on non-templated functions and non-templated members of
291 // templated classes.
292 if (FD->getTemplatedKind() != FunctionDecl::TK_NonTemplate &&
293 FD->getTemplatedKind() != FunctionDecl::TK_MemberSpecialization)
294 return;
295
296 CFG *cfg = AC.getCFG();
297 if (!cfg) return;
298
299 // If the exit block is unreachable, skip processing the function.
300 if (cfg->getExit().pred_empty())
301 return;
302
  // Emit a diagnostic if a recursive function call is detected on all paths.
304 if (checkForRecursiveFunctionCall(FD, cfg))
305 S.Diag(Loc: Body->getBeginLoc(), DiagID: diag::warn_infinite_recursive_function);
306}
307
308//===----------------------------------------------------------------------===//
309// Check for throw in a non-throwing function.
310//===----------------------------------------------------------------------===//
311
312/// Determine whether an exception thrown by E, unwinding from ThrowBlock,
313/// can reach ExitBlock.
314static bool throwEscapes(Sema &S, const CXXThrowExpr *E, CFGBlock &ThrowBlock,
315 CFG *Body) {
316 SmallVector<CFGBlock *, 16> Stack;
317 llvm::BitVector Queued(Body->getNumBlockIDs());
318
319 Stack.push_back(Elt: &ThrowBlock);
320 Queued[ThrowBlock.getBlockID()] = true;
321
322 while (!Stack.empty()) {
323 CFGBlock &UnwindBlock = *Stack.pop_back_val();
324
325 for (auto &Succ : UnwindBlock.succs()) {
326 if (!Succ.isReachable() || Queued[Succ->getBlockID()])
327 continue;
328
329 if (Succ->getBlockID() == Body->getExit().getBlockID())
330 return true;
331
332 if (auto *Catch =
333 dyn_cast_or_null<CXXCatchStmt>(Val: Succ->getLabel())) {
334 QualType Caught = Catch->getCaughtType();
        if (Caught.isNull() || // catch (...) catches everything
            !E->getSubExpr() || // 'throw;' is considered caught by any handler
            S.handlerCanCatch(Caught, E->getSubExpr()->getType()))
338 // Exception doesn't escape via this path.
339 break;
340 } else {
341 Stack.push_back(Elt: Succ);
342 Queued[Succ->getBlockID()] = true;
343 }
344 }
345 }
346
347 return false;
348}
349
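// Call 'Visit' for every CXXThrowExpr that appears in a CFG block reachable
// from the entry block of the function body.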
350static void visitReachableThrows(
351 CFG *BodyCFG,
352 llvm::function_ref<void(const CXXThrowExpr *, CFGBlock &)> Visit) {
353 llvm::BitVector Reachable(BodyCFG->getNumBlockIDs());
354 clang::reachable_code::ScanReachableFromBlock(Start: &BodyCFG->getEntry(), Reachable);
355 for (CFGBlock *B : *BodyCFG) {
356 if (!Reachable[B->getBlockID()])
357 continue;
358 for (CFGElement &E : *B) {
359 std::optional<CFGStmt> S = E.getAs<CFGStmt>();
360 if (!S)
361 continue;
362 if (auto *Throw = dyn_cast<CXXThrowExpr>(Val: S->getStmt()))
363 Visit(Throw, *B);
364 }
365 }
366}
367
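// Emit the warning for a throw that can escape a non-throwing function,
// together with a note pointing at the function's exception specification.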
368static void EmitDiagForCXXThrowInNonThrowingFunc(Sema &S, SourceLocation OpLoc,
369 const FunctionDecl *FD) {
370 if (!S.getSourceManager().isInSystemHeader(Loc: OpLoc) &&
371 FD->getTypeSourceInfo()) {
372 S.Diag(Loc: OpLoc, DiagID: diag::warn_throw_in_noexcept_func) << FD;
373 if (S.getLangOpts().CPlusPlus11 &&
374 (isa<CXXDestructorDecl>(Val: FD) ||
375 FD->getDeclName().getCXXOverloadedOperator() == OO_Delete ||
376 FD->getDeclName().getCXXOverloadedOperator() == OO_Array_Delete)) {
377 if (const auto *Ty = FD->getTypeSourceInfo()->getType()->
378 getAs<FunctionProtoType>())
379 S.Diag(Loc: FD->getLocation(), DiagID: diag::note_throw_in_dtor)
380 << !isa<CXXDestructorDecl>(Val: FD) << !Ty->hasExceptionSpec()
381 << FD->getExceptionSpecSourceRange();
382 } else
383 S.Diag(Loc: FD->getLocation(), DiagID: diag::note_throw_in_function)
384 << FD->getExceptionSpecSourceRange();
385 }
386}
387
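// Walk the CFG of a non-throwing function and diagnose every reachable throw
// expression whose exception can unwind past the end of the function.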
388static void checkThrowInNonThrowingFunc(Sema &S, const FunctionDecl *FD,
389 AnalysisDeclContext &AC) {
390 CFG *BodyCFG = AC.getCFG();
391 if (!BodyCFG)
392 return;
393 if (BodyCFG->getExit().pred_empty())
394 return;
395 visitReachableThrows(BodyCFG, Visit: [&](const CXXThrowExpr *Throw, CFGBlock &Block) {
396 if (throwEscapes(S, E: Throw, ThrowBlock&: Block, Body: BodyCFG))
397 EmitDiagForCXXThrowInNonThrowingFunc(S, OpLoc: Throw->getThrowLoc(), FD);
398 });
399}
400
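// Returns true if the function has a non-throwing exception specification or
// carries a 'nothrow' attribute.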
401static bool isNoexcept(const FunctionDecl *FD) {
402 const auto *FPT = FD->getType()->castAs<FunctionProtoType>();
403 if (FPT->isNothrow() || FD->hasAttr<NoThrowAttr>())
404 return true;
405 return false;
406}
407
/// Checks if the given expression is a reference to a function with the
/// 'noreturn' attribute.
410static bool isReferenceToNoReturn(const Expr *E) {
411 if (auto *DRef = dyn_cast<DeclRefExpr>(Val: E->IgnoreParenCasts()))
412 if (auto *FD = dyn_cast<FunctionDecl>(Val: DRef->getDecl()))
413 return FD->isNoReturn();
414 return false;
415}
416
/// Checks if the given variable, which is assumed to be a function pointer, is
/// initialized with a function having the 'noreturn' attribute.
419static bool isInitializedWithNoReturn(const VarDecl *VD) {
420 if (const Expr *Init = VD->getInit()) {
421 if (auto *ListInit = dyn_cast<InitListExpr>(Val: Init);
422 ListInit && ListInit->getNumInits() > 0)
423 Init = ListInit->getInit(Init: 0);
424 return isReferenceToNoReturn(E: Init);
425 }
426 return false;
427}
428
429namespace {
430
/// Looks for statements that can define the value of the given variable.
432struct TransferFunctions : public StmtVisitor<TransferFunctions> {
433 const VarDecl *Var;
434 std::optional<bool> AllValuesAreNoReturn;
435
436 TransferFunctions(const VarDecl *VD) : Var(VD) {}
437
438 void reset() { AllValuesAreNoReturn = std::nullopt; }
439
440 void VisitDeclStmt(DeclStmt *DS) {
441 for (auto *DI : DS->decls())
442 if (auto *VD = dyn_cast<VarDecl>(Val: DI))
443 if (VarDecl *Def = VD->getDefinition())
444 if (Def == Var)
445 AllValuesAreNoReturn = isInitializedWithNoReturn(VD: Def);
446 }
447
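  // Taking the address of the variable may allow it to be modified through
  // the resulting pointer, so stop assuming all of its values are noreturn.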
448 void VisitUnaryOperator(UnaryOperator *UO) {
449 if (UO->getOpcode() == UO_AddrOf) {
450 if (auto *DRef =
451 dyn_cast<DeclRefExpr>(Val: UO->getSubExpr()->IgnoreParenCasts()))
452 if (DRef->getDecl() == Var)
453 AllValuesAreNoReturn = false;
454 }
455 }
456
457 void VisitBinaryOperator(BinaryOperator *BO) {
458 if (BO->getOpcode() == BO_Assign)
459 if (auto *DRef = dyn_cast<DeclRefExpr>(Val: BO->getLHS()->IgnoreParenCasts()))
460 if (DRef->getDecl() == Var)
461 AllValuesAreNoReturn = isReferenceToNoReturn(E: BO->getRHS());
462 }
463
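  // Passing the variable as a non-const lvalue argument may modify it, so be
  // conservative and stop assuming all of its values are noreturn.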
464 void VisitCallExpr(CallExpr *CE) {
465 for (CallExpr::arg_iterator I = CE->arg_begin(), E = CE->arg_end(); I != E;
466 ++I) {
467 const Expr *Arg = *I;
468 if (Arg->isGLValue() && !Arg->getType().isConstQualified())
469 if (auto *DRef = dyn_cast<DeclRefExpr>(Val: Arg->IgnoreParenCasts()))
470 if (auto VD = dyn_cast<VarDecl>(Val: DRef->getDecl()))
471 if (VD->getDefinition() == Var)
472 AllValuesAreNoReturn = false;
473 }
474 }
475};
476} // namespace
477
// Checks if all possible values of the given variable are functions with the
// 'noreturn' attribute.
480static bool areAllValuesNoReturn(const VarDecl *VD, const CFGBlock &VarBlk,
481 AnalysisDeclContext &AC) {
482 // The set of possible values of a constant variable is determined by
483 // its initializer, unless it is a function parameter.
484 if (!isa<ParmVarDecl>(Val: VD) && VD->getType().isConstant(Ctx: AC.getASTContext())) {
485 if (const VarDecl *Def = VD->getDefinition())
486 return isInitializedWithNoReturn(VD: Def);
487 return false;
488 }
489
  // In a multithreaded environment the value of a global variable may be
  // changed asynchronously.
492 if (!VD->getDeclContext()->isFunctionOrMethod())
493 return false;
494
  // Check the condition "all values are noreturn". It is satisfied if the
  // variable is set to a "noreturn" value in the current block or if all of
  // its predecessors satisfy the condition.
498 using MapTy = llvm::DenseMap<const CFGBlock *, std::optional<bool>>;
499 using ValueTy = MapTy::value_type;
500 MapTy BlocksToCheck;
501 BlocksToCheck[&VarBlk] = std::nullopt;
502 const auto BlockSatisfiesCondition = [](ValueTy Item) {
503 return Item.getSecond().value_or(u: false);
504 };
505
506 TransferFunctions TF(VD);
507 BackwardDataflowWorklist Worklist(*AC.getCFG(), AC);
508 llvm::DenseSet<const CFGBlock *> Visited;
509 Worklist.enqueueBlock(Block: &VarBlk);
510 while (const CFGBlock *B = Worklist.dequeue()) {
511 if (Visited.contains(V: B))
512 continue;
513 Visited.insert(V: B);
514 // First check the current block.
515 for (CFGBlock::const_reverse_iterator ri = B->rbegin(), re = B->rend();
516 ri != re; ++ri) {
517 if (std::optional<CFGStmt> cs = ri->getAs<CFGStmt>()) {
518 const Stmt *S = cs->getStmt();
519 TF.reset();
520 TF.Visit(S: const_cast<Stmt *>(S));
521 if (TF.AllValuesAreNoReturn) {
522 if (!TF.AllValuesAreNoReturn.value())
523 return false;
524 BlocksToCheck[B] = true;
525 break;
526 }
527 }
528 }
529
530 // If all checked blocks satisfy the condition, the check is finished.
531 if (llvm::all_of(Range&: BlocksToCheck, P: BlockSatisfiesCondition))
532 return true;
533
534 // If this block does not contain the variable definition, check
535 // its predecessors.
536 if (!BlocksToCheck[B]) {
537 Worklist.enqueuePredecessors(Block: B);
538 BlocksToCheck.erase(Val: B);
539 for (const auto &PredBlk : B->preds())
540 if (!BlocksToCheck.contains(Val: PredBlk))
541 BlocksToCheck[PredBlk] = std::nullopt;
542 }
543 }
544
545 return false;
546}
547
548//===----------------------------------------------------------------------===//
549// Check for missing return value.
550//===----------------------------------------------------------------------===//
551
552enum ControlFlowKind {
553 UnknownFallThrough,
554 NeverFallThrough,
555 MaybeFallThrough,
556 AlwaysFallThrough,
557 NeverFallThroughOrReturn
558};
559
560/// CheckFallThrough - Check that we don't fall off the end of a
561/// Statement that should return a value.
562///
563/// \returns AlwaysFallThrough iff we always fall off the end of the statement,
564/// MaybeFallThrough iff we might or might not fall off the end,
565/// NeverFallThroughOrReturn iff we never fall off the end of the statement or
566/// return. We assume NeverFallThrough iff we never fall off the end of the
567/// statement but we may return. We assume that functions not marked noreturn
568/// will return.
569static ControlFlowKind CheckFallThrough(AnalysisDeclContext &AC) {
570 CFG *cfg = AC.getCFG();
571 if (!cfg) return UnknownFallThrough;
572
573 // The CFG leaves in dead things, and we don't want the dead code paths to
574 // confuse us, so we mark all live things first.
575 llvm::BitVector live(cfg->getNumBlockIDs());
576 unsigned count = reachable_code::ScanReachableFromBlock(Start: &cfg->getEntry(),
577 Reachable&: live);
578
579 bool AddEHEdges = AC.getAddEHEdges();
580 if (!AddEHEdges && count != cfg->getNumBlockIDs())
581 // When there are things remaining dead, and we didn't add EH edges
582 // from CallExprs to the catch clauses, we have to go back and
583 // mark them as live.
584 for (const auto *B : *cfg) {
585 if (!live[B->getBlockID()]) {
586 if (B->preds().empty()) {
587 const Stmt *Term = B->getTerminatorStmt();
588 if (isa_and_nonnull<CXXTryStmt>(Val: Term))
589 // When not adding EH edges from calls, catch clauses
590 // can otherwise seem dead. Avoid noting them as dead.
591 count += reachable_code::ScanReachableFromBlock(Start: B, Reachable&: live);
592 continue;
593 }
594 }
595 }
596
  // Now that we know what is live, check the live predecessors of the exit
  // block and look for fall-through paths, being careful to ignore normal
  // returns and exceptional paths.
600 bool HasLiveReturn = false;
601 bool HasFakeEdge = false;
602 bool HasPlainEdge = false;
603 bool HasAbnormalEdge = false;
604
605 // Ignore default cases that aren't likely to be reachable because all
606 // enums in a switch(X) have explicit case statements.
607 CFGBlock::FilterOptions FO;
608 FO.IgnoreDefaultsWithCoveredEnums = 1;
609
610 for (CFGBlock::filtered_pred_iterator I =
611 cfg->getExit().filtered_pred_start_end(f: FO);
612 I.hasMore(); ++I) {
613 const CFGBlock &B = **I;
614 if (!live[B.getBlockID()])
615 continue;
616
617 // Skip blocks which contain an element marked as no-return. They don't
618 // represent actually viable edges into the exit block, so mark them as
619 // abnormal.
620 if (B.hasNoReturnElement()) {
621 HasAbnormalEdge = true;
622 continue;
623 }
624
    // Destructors can appear after the 'return' in the CFG. This is
    // normal. We need to look past the destructors for the return
    // statement (if it exists).
628 CFGBlock::const_reverse_iterator ri = B.rbegin(), re = B.rend();
629
630 for ( ; ri != re ; ++ri)
631 if (ri->getAs<CFGStmt>())
632 break;
633
634 // No more CFGElements in the block?
635 if (ri == re) {
636 const Stmt *Term = B.getTerminatorStmt();
637 if (Term && (isa<CXXTryStmt>(Val: Term) || isa<ObjCAtTryStmt>(Val: Term))) {
638 HasAbnormalEdge = true;
639 continue;
640 }
641 // A labeled empty statement, or the entry block...
642 HasPlainEdge = true;
643 continue;
644 }
645
646 CFGStmt CS = ri->castAs<CFGStmt>();
647 const Stmt *S = CS.getStmt();
648 if (isa<ReturnStmt>(Val: S) || isa<CoreturnStmt>(Val: S)) {
649 HasLiveReturn = true;
650 continue;
651 }
652 if (isa<ObjCAtThrowStmt>(Val: S)) {
653 HasFakeEdge = true;
654 continue;
655 }
656 if (isa<CXXThrowExpr>(Val: S)) {
657 HasFakeEdge = true;
658 continue;
659 }
660 if (isa<MSAsmStmt>(Val: S)) {
661 // TODO: Verify this is correct.
662 HasFakeEdge = true;
663 HasLiveReturn = true;
664 continue;
665 }
666 if (isa<CXXTryStmt>(Val: S)) {
667 HasAbnormalEdge = true;
668 continue;
669 }
670 if (!llvm::is_contained(Range: B.succs(), Element: &cfg->getExit())) {
671 HasAbnormalEdge = true;
672 continue;
673 }
674 if (auto *Call = dyn_cast<CallExpr>(Val: S)) {
675 const Expr *Callee = Call->getCallee();
676 if (Callee->getType()->isPointerType())
677 if (auto *DeclRef =
678 dyn_cast<DeclRefExpr>(Val: Callee->IgnoreParenImpCasts()))
679 if (auto *VD = dyn_cast<VarDecl>(Val: DeclRef->getDecl()))
680 if (areAllValuesNoReturn(VD, VarBlk: B, AC)) {
681 HasAbnormalEdge = true;
682 continue;
683 }
684 }
685
686 HasPlainEdge = true;
687 }
688 if (!HasPlainEdge) {
689 if (HasLiveReturn)
690 return NeverFallThrough;
691 return NeverFallThroughOrReturn;
692 }
693 if (HasAbnormalEdge || HasFakeEdge || HasLiveReturn)
694 return MaybeFallThrough;
  // This says AlwaysFallThrough for calls to functions that are not marked
  // noreturn but that never return. If people would like this warning to be
  // more accurate, such functions should be marked as noreturn.
698 return AlwaysFallThrough;
699}
700
701namespace {
702
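/// Holds the diagnostic IDs used when a function, coroutine, block, or lambda
/// falls off the end of its body, together with the location and kind of the
/// enclosing function-like entity.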
703struct CheckFallThroughDiagnostics {
704 unsigned diag_FallThrough_HasNoReturn = 0;
705 unsigned diag_FallThrough_ReturnsNonVoid = 0;
706 unsigned diag_NeverFallThroughOrReturn = 0;
707 unsigned FunKind; // TODO: use diag::FalloffFunctionKind
708 SourceLocation FuncLoc;
709
710 static CheckFallThroughDiagnostics MakeForFunction(Sema &S,
711 const Decl *Func) {
712 CheckFallThroughDiagnostics D;
713 D.FuncLoc = Func->getLocation();
714 D.diag_FallThrough_HasNoReturn = diag::warn_noreturn_has_return_expr;
715 D.diag_FallThrough_ReturnsNonVoid = diag::warn_falloff_nonvoid;
716
717 // Don't suggest that virtual functions be marked "noreturn", since they
718 // might be overridden by non-noreturn functions.
719 bool isVirtualMethod = false;
720 if (const CXXMethodDecl *Method = dyn_cast<CXXMethodDecl>(Val: Func))
721 isVirtualMethod = Method->isVirtual();
722
723 // Don't suggest that template instantiations be marked "noreturn"
724 bool isTemplateInstantiation = false;
725 if (const FunctionDecl *Function = dyn_cast<FunctionDecl>(Val: Func)) {
726 isTemplateInstantiation = Function->isTemplateInstantiation();
727 if (!S.getLangOpts().CPlusPlus && !S.getLangOpts().C99 &&
728 Function->isMain()) {
729 D.diag_FallThrough_ReturnsNonVoid = diag::ext_main_no_return;
730 }
731 }
732
733 if (!isVirtualMethod && !isTemplateInstantiation)
734 D.diag_NeverFallThroughOrReturn = diag::warn_suggest_noreturn_function;
735
736 D.FunKind = diag::FalloffFunctionKind::Function;
737 return D;
738 }
739
740 static CheckFallThroughDiagnostics MakeForCoroutine(const Decl *Func) {
741 CheckFallThroughDiagnostics D;
742 D.FuncLoc = Func->getLocation();
743 D.diag_FallThrough_ReturnsNonVoid = diag::warn_falloff_nonvoid;
744 D.FunKind = diag::FalloffFunctionKind::Coroutine;
745 return D;
746 }
747
748 static CheckFallThroughDiagnostics MakeForBlock() {
749 CheckFallThroughDiagnostics D;
750 D.diag_FallThrough_HasNoReturn = diag::err_noreturn_has_return_expr;
751 D.diag_FallThrough_ReturnsNonVoid = diag::err_falloff_nonvoid;
752 D.FunKind = diag::FalloffFunctionKind::Block;
753 return D;
754 }
755
756 static CheckFallThroughDiagnostics MakeForLambda() {
757 CheckFallThroughDiagnostics D;
758 D.diag_FallThrough_HasNoReturn = diag::err_noreturn_has_return_expr;
759 D.diag_FallThrough_ReturnsNonVoid = diag::warn_falloff_nonvoid;
760 D.FunKind = diag::FalloffFunctionKind::Lambda;
761 return D;
762 }
763
764 bool checkDiagnostics(DiagnosticsEngine &D, bool ReturnsVoid,
765 bool HasNoReturn) const {
766 if (FunKind == diag::FalloffFunctionKind::Function) {
767 return (ReturnsVoid ||
768 D.isIgnored(DiagID: diag::warn_falloff_nonvoid, Loc: FuncLoc)) &&
769 (!HasNoReturn ||
770 D.isIgnored(DiagID: diag::warn_noreturn_has_return_expr, Loc: FuncLoc)) &&
771 (!ReturnsVoid ||
772 D.isIgnored(DiagID: diag::warn_suggest_noreturn_block, Loc: FuncLoc));
773 }
774 if (FunKind == diag::FalloffFunctionKind::Coroutine) {
775 return (ReturnsVoid ||
776 D.isIgnored(DiagID: diag::warn_falloff_nonvoid, Loc: FuncLoc)) &&
777 (!HasNoReturn);
778 }
779 // For blocks / lambdas.
780 return ReturnsVoid && !HasNoReturn;
781 }
782};
783
784} // anonymous namespace
785
786/// CheckFallThroughForBody - Check that we don't fall off the end of a
787/// function that should return a value. Check that we don't fall off the end
788/// of a noreturn function. We assume that functions and blocks not marked
789/// noreturn will return.
790static void CheckFallThroughForBody(Sema &S, const Decl *D, const Stmt *Body,
791 QualType BlockType,
792 const CheckFallThroughDiagnostics &CD,
793 AnalysisDeclContext &AC) {
794
795 bool ReturnsVoid = false;
796 bool HasNoReturn = false;
797
798 if (const auto *FD = dyn_cast<FunctionDecl>(Val: D)) {
799 if (const auto *CBody = dyn_cast<CoroutineBodyStmt>(Val: Body))
800 ReturnsVoid = CBody->getFallthroughHandler() != nullptr;
801 else
802 ReturnsVoid = FD->getReturnType()->isVoidType();
803 HasNoReturn = FD->isNoReturn() || FD->hasAttr<InferredNoReturnAttr>();
804 }
805 else if (const auto *MD = dyn_cast<ObjCMethodDecl>(Val: D)) {
806 ReturnsVoid = MD->getReturnType()->isVoidType();
807 HasNoReturn = MD->hasAttr<NoReturnAttr>();
808 }
809 else if (isa<BlockDecl>(Val: D)) {
810 if (const FunctionType *FT =
811 BlockType->getPointeeType()->getAs<FunctionType>()) {
812 if (FT->getReturnType()->isVoidType())
813 ReturnsVoid = true;
814 if (FT->getNoReturnAttr())
815 HasNoReturn = true;
816 }
817 }
818
819 DiagnosticsEngine &Diags = S.getDiagnostics();
820
821 // Short circuit for compilation speed.
822 if (CD.checkDiagnostics(D&: Diags, ReturnsVoid, HasNoReturn))
823 return;
824 SourceLocation LBrace = Body->getBeginLoc(), RBrace = Body->getEndLoc();
825
826 // cpu_dispatch functions permit empty function bodies for ICC compatibility.
827 if (D->getAsFunction() && D->getAsFunction()->isCPUDispatchMultiVersion())
828 return;
829
830 // Either in a function body compound statement, or a function-try-block.
831 switch (int FallThroughType = CheckFallThrough(AC)) {
832 case UnknownFallThrough:
833 break;
834
835 case MaybeFallThrough:
836 case AlwaysFallThrough:
837 if (HasNoReturn) {
838 if (CD.diag_FallThrough_HasNoReturn)
839 S.Diag(Loc: RBrace, DiagID: CD.diag_FallThrough_HasNoReturn) << CD.FunKind;
840 } else if (!ReturnsVoid && CD.diag_FallThrough_ReturnsNonVoid) {
841 // If the final statement is a call to an always-throwing function,
842 // don't warn about the fall-through.
843 if (D->getAsFunction()) {
844 if (const auto *CS = dyn_cast<CompoundStmt>(Val: Body);
845 CS && !CS->body_empty()) {
846 const Stmt *LastStmt = CS->body_back();
847 // Unwrap ExprWithCleanups if necessary.
848 if (const auto *EWC = dyn_cast<ExprWithCleanups>(Val: LastStmt)) {
849 LastStmt = EWC->getSubExpr();
850 }
851 if (const auto *CE = dyn_cast<CallExpr>(Val: LastStmt)) {
852 if (const FunctionDecl *Callee = CE->getDirectCallee();
853 Callee && Callee->hasAttr<InferredNoReturnAttr>()) {
854 return; // Don't warn about fall-through.
855 }
856 }
857 // Direct throw.
858 if (isa<CXXThrowExpr>(Val: LastStmt)) {
859 return; // Don't warn about fall-through.
860 }
861 }
862 }
863 bool NotInAllControlPaths = FallThroughType == MaybeFallThrough;
864 S.Diag(Loc: RBrace, DiagID: CD.diag_FallThrough_ReturnsNonVoid)
865 << CD.FunKind << NotInAllControlPaths;
866 }
867 break;
868 case NeverFallThroughOrReturn:
869 if (ReturnsVoid && !HasNoReturn && CD.diag_NeverFallThroughOrReturn) {
870 if (const FunctionDecl *FD = dyn_cast<FunctionDecl>(Val: D)) {
871 S.Diag(Loc: LBrace, DiagID: CD.diag_NeverFallThroughOrReturn) << 0 << FD;
872 } else if (const ObjCMethodDecl *MD = dyn_cast<ObjCMethodDecl>(Val: D)) {
873 S.Diag(Loc: LBrace, DiagID: CD.diag_NeverFallThroughOrReturn) << 1 << MD;
874 } else {
875 S.Diag(Loc: LBrace, DiagID: CD.diag_NeverFallThroughOrReturn);
876 }
877 }
878 break;
879 case NeverFallThrough:
880 break;
881 }
882}
883
884//===----------------------------------------------------------------------===//
885// -Wuninitialized
886//===----------------------------------------------------------------------===//
887
888namespace {
889/// ContainsReference - A visitor class to search for references to
890/// a particular declaration (the needle) within any evaluated component of an
891/// expression (recursively).
892class ContainsReference : public ConstEvaluatedExprVisitor<ContainsReference> {
893 bool FoundReference;
894 const DeclRefExpr *Needle;
895
896public:
897 typedef ConstEvaluatedExprVisitor<ContainsReference> Inherited;
898
899 ContainsReference(ASTContext &Context, const DeclRefExpr *Needle)
900 : Inherited(Context), FoundReference(false), Needle(Needle) {}
901
902 void VisitExpr(const Expr *E) {
903 // Stop evaluating if we already have a reference.
904 if (FoundReference)
905 return;
906
907 Inherited::VisitExpr(S: E);
908 }
909
910 void VisitDeclRefExpr(const DeclRefExpr *E) {
911 if (E == Needle)
912 FoundReference = true;
913 else
914 Inherited::VisitDeclRefExpr(E);
915 }
916
917 bool doesContainReference() const { return FoundReference; }
918};
919} // anonymous namespace
920
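/// Suggest a fixit that initializes the variable (or marks a block-pointer
/// variable as __block). Returns true if a note with a fixit was emitted.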
921static bool SuggestInitializationFixit(Sema &S, const VarDecl *VD) {
922 QualType VariableTy = VD->getType().getCanonicalType();
923 if (VariableTy->isBlockPointerType() &&
924 !VD->hasAttr<BlocksAttr>()) {
925 S.Diag(Loc: VD->getLocation(), DiagID: diag::note_block_var_fixit_add_initialization)
926 << VD->getDeclName()
927 << FixItHint::CreateInsertion(InsertionLoc: VD->getLocation(), Code: "__block ");
928 return true;
929 }
930
931 // Don't issue a fixit if there is already an initializer.
932 if (VD->getInit())
933 return false;
934
935 // Don't suggest a fixit inside macros.
936 if (VD->getEndLoc().isMacroID())
937 return false;
938
939 SourceLocation Loc = S.getLocForEndOfToken(Loc: VD->getEndLoc());
940
941 // Suggest possible initialization (if any).
942 std::string Init = S.getFixItZeroInitializerForType(T: VariableTy, Loc);
943 if (Init.empty())
944 return false;
945
946 S.Diag(Loc, DiagID: diag::note_var_fixit_add_initialization) << VD->getDeclName()
947 << FixItHint::CreateInsertion(InsertionLoc: Loc, Code: Init);
948 return true;
949}
950
951/// Create a fixit to remove an if-like statement, on the assumption that its
952/// condition is CondVal.
953static void CreateIfFixit(Sema &S, const Stmt *If, const Stmt *Then,
954 const Stmt *Else, bool CondVal,
955 FixItHint &Fixit1, FixItHint &Fixit2) {
956 if (CondVal) {
957 // If condition is always true, remove all but the 'then'.
958 Fixit1 = FixItHint::CreateRemoval(
959 RemoveRange: CharSourceRange::getCharRange(B: If->getBeginLoc(), E: Then->getBeginLoc()));
960 if (Else) {
961 SourceLocation ElseKwLoc = S.getLocForEndOfToken(Loc: Then->getEndLoc());
962 Fixit2 =
963 FixItHint::CreateRemoval(RemoveRange: SourceRange(ElseKwLoc, Else->getEndLoc()));
964 }
965 } else {
966 // If condition is always false, remove all but the 'else'.
967 if (Else)
968 Fixit1 = FixItHint::CreateRemoval(RemoveRange: CharSourceRange::getCharRange(
969 B: If->getBeginLoc(), E: Else->getBeginLoc()));
970 else
971 Fixit1 = FixItHint::CreateRemoval(RemoveRange: If->getSourceRange());
972 }
973}
974
975/// DiagUninitUse -- Helper function to produce a diagnostic for an
976/// uninitialized use of a variable.
977static void DiagUninitUse(Sema &S, const VarDecl *VD, const UninitUse &Use,
978 bool IsCapturedByBlock) {
979 bool Diagnosed = false;
980
981 switch (Use.getKind()) {
982 case UninitUse::Always:
983 S.Diag(Loc: Use.getUser()->getBeginLoc(), DiagID: diag::warn_uninit_var)
984 << VD->getDeclName() << IsCapturedByBlock
985 << Use.getUser()->getSourceRange();
986 return;
987
988 case UninitUse::AfterDecl:
989 case UninitUse::AfterCall:
990 S.Diag(Loc: VD->getLocation(), DiagID: diag::warn_sometimes_uninit_var)
991 << VD->getDeclName() << IsCapturedByBlock
992 << (Use.getKind() == UninitUse::AfterDecl ? 4 : 5)
993 << VD->getLexicalDeclContext() << VD->getSourceRange();
994 S.Diag(Loc: Use.getUser()->getBeginLoc(), DiagID: diag::note_uninit_var_use)
995 << IsCapturedByBlock << Use.getUser()->getSourceRange();
996 return;
997
998 case UninitUse::Maybe:
999 case UninitUse::Sometimes:
1000 // Carry on to report sometimes-uninitialized branches, if possible,
1001 // or a 'may be used uninitialized' diagnostic otherwise.
1002 break;
1003 }
1004
1005 // Diagnose each branch which leads to a sometimes-uninitialized use.
1006 for (UninitUse::branch_iterator I = Use.branch_begin(), E = Use.branch_end();
1007 I != E; ++I) {
1008 assert(Use.getKind() == UninitUse::Sometimes);
1009
1010 const Expr *User = Use.getUser();
1011 const Stmt *Term = I->Terminator;
1012
1013 // Information used when building the diagnostic.
1014 unsigned DiagKind;
1015 StringRef Str;
1016 SourceRange Range;
1017
1018 // FixIts to suppress the diagnostic by removing the dead condition.
1019 // For all binary terminators, branch 0 is taken if the condition is true,
1020 // and branch 1 is taken if the condition is false.
1021 int RemoveDiagKind = -1;
1022 const char *FixitStr =
1023 S.getLangOpts().CPlusPlus ? (I->Output ? "true" : "false")
1024 : (I->Output ? "1" : "0");
1025 FixItHint Fixit1, Fixit2;
1026
1027 switch (Term ? Term->getStmtClass() : Stmt::DeclStmtClass) {
1028 default:
1029 // Don't know how to report this. Just fall back to 'may be used
1030 // uninitialized'. FIXME: Can this happen?
1031 continue;
1032
1033 // "condition is true / condition is false".
1034 case Stmt::IfStmtClass: {
1035 const IfStmt *IS = cast<IfStmt>(Val: Term);
1036 DiagKind = 0;
1037 Str = "if";
1038 Range = IS->getCond()->getSourceRange();
1039 RemoveDiagKind = 0;
1040 CreateIfFixit(S, If: IS, Then: IS->getThen(), Else: IS->getElse(),
1041 CondVal: I->Output, Fixit1, Fixit2);
1042 break;
1043 }
1044 case Stmt::ConditionalOperatorClass: {
1045 const ConditionalOperator *CO = cast<ConditionalOperator>(Val: Term);
1046 DiagKind = 0;
1047 Str = "?:";
1048 Range = CO->getCond()->getSourceRange();
1049 RemoveDiagKind = 0;
1050 CreateIfFixit(S, If: CO, Then: CO->getTrueExpr(), Else: CO->getFalseExpr(),
1051 CondVal: I->Output, Fixit1, Fixit2);
1052 break;
1053 }
1054 case Stmt::BinaryOperatorClass: {
1055 const BinaryOperator *BO = cast<BinaryOperator>(Val: Term);
1056 if (!BO->isLogicalOp())
1057 continue;
1058 DiagKind = 0;
1059 Str = BO->getOpcodeStr();
1060 Range = BO->getLHS()->getSourceRange();
1061 RemoveDiagKind = 0;
1062 if ((BO->getOpcode() == BO_LAnd && I->Output) ||
1063 (BO->getOpcode() == BO_LOr && !I->Output))
1064 // true && y -> y, false || y -> y.
1065 Fixit1 = FixItHint::CreateRemoval(
1066 RemoveRange: SourceRange(BO->getBeginLoc(), BO->getOperatorLoc()));
1067 else
1068 // false && y -> false, true || y -> true.
1069 Fixit1 = FixItHint::CreateReplacement(RemoveRange: BO->getSourceRange(), Code: FixitStr);
1070 break;
1071 }
1072
1073 // "loop is entered / loop is exited".
1074 case Stmt::WhileStmtClass:
1075 DiagKind = 1;
1076 Str = "while";
1077 Range = cast<WhileStmt>(Val: Term)->getCond()->getSourceRange();
1078 RemoveDiagKind = 1;
1079 Fixit1 = FixItHint::CreateReplacement(RemoveRange: Range, Code: FixitStr);
1080 break;
1081 case Stmt::ForStmtClass:
1082 DiagKind = 1;
1083 Str = "for";
1084 Range = cast<ForStmt>(Val: Term)->getCond()->getSourceRange();
1085 RemoveDiagKind = 1;
1086 if (I->Output)
1087 Fixit1 = FixItHint::CreateRemoval(RemoveRange: Range);
1088 else
1089 Fixit1 = FixItHint::CreateReplacement(RemoveRange: Range, Code: FixitStr);
1090 break;
1091 case Stmt::CXXForRangeStmtClass:
1092 if (I->Output == 1) {
1093 // The use occurs if a range-based for loop's body never executes.
1094 // That may be impossible, and there's no syntactic fix for this,
1095 // so treat it as a 'may be uninitialized' case.
1096 continue;
1097 }
1098 DiagKind = 1;
1099 Str = "for";
1100 Range = cast<CXXForRangeStmt>(Val: Term)->getRangeInit()->getSourceRange();
1101 break;
1102
1103 // "condition is true / loop is exited".
1104 case Stmt::DoStmtClass:
1105 DiagKind = 2;
1106 Str = "do";
1107 Range = cast<DoStmt>(Val: Term)->getCond()->getSourceRange();
1108 RemoveDiagKind = 1;
1109 Fixit1 = FixItHint::CreateReplacement(RemoveRange: Range, Code: FixitStr);
1110 break;
1111
1112 // "switch case is taken".
1113 case Stmt::CaseStmtClass:
1114 DiagKind = 3;
1115 Str = "case";
1116 Range = cast<CaseStmt>(Val: Term)->getLHS()->getSourceRange();
1117 break;
1118 case Stmt::DefaultStmtClass:
1119 DiagKind = 3;
1120 Str = "default";
1121 Range = cast<DefaultStmt>(Val: Term)->getDefaultLoc();
1122 break;
1123 }
1124
1125 S.Diag(Loc: Range.getBegin(), DiagID: diag::warn_sometimes_uninit_var)
1126 << VD->getDeclName() << IsCapturedByBlock << DiagKind
1127 << Str << I->Output << Range;
1128 S.Diag(Loc: User->getBeginLoc(), DiagID: diag::note_uninit_var_use)
1129 << IsCapturedByBlock << User->getSourceRange();
1130 if (RemoveDiagKind != -1)
1131 S.Diag(Loc: Fixit1.RemoveRange.getBegin(), DiagID: diag::note_uninit_fixit_remove_cond)
1132 << RemoveDiagKind << Str << I->Output << Fixit1 << Fixit2;
1133
1134 Diagnosed = true;
1135 }
1136
1137 if (!Diagnosed)
1138 S.Diag(Loc: Use.getUser()->getBeginLoc(), DiagID: diag::warn_maybe_uninit_var)
1139 << VD->getDeclName() << IsCapturedByBlock
1140 << Use.getUser()->getSourceRange();
1141}
1142
1143/// Diagnose uninitialized const reference usages.
1144static bool DiagnoseUninitializedConstRefUse(Sema &S, const VarDecl *VD,
1145 const UninitUse &Use) {
1146 S.Diag(Loc: Use.getUser()->getBeginLoc(), DiagID: diag::warn_uninit_const_reference)
1147 << VD->getDeclName() << Use.getUser()->getSourceRange();
1148 return !S.getDiagnostics().isLastDiagnosticIgnored();
1149}
1150
1151/// Diagnose uninitialized const pointer usages.
1152static bool DiagnoseUninitializedConstPtrUse(Sema &S, const VarDecl *VD,
1153 const UninitUse &Use) {
1154 S.Diag(Loc: Use.getUser()->getBeginLoc(), DiagID: diag::warn_uninit_const_pointer)
1155 << VD->getDeclName() << Use.getUser()->getSourceRange();
1156 return !S.getDiagnostics().isLastDiagnosticIgnored();
1157}
1158
1159/// DiagnoseUninitializedUse -- Helper function for diagnosing uses of an
1160/// uninitialized variable. This manages the different forms of diagnostic
1161/// emitted for particular types of uses. Returns true if the use was diagnosed
1162/// as a warning. If a particular use is one we omit warnings for, returns
1163/// false.
1164static bool DiagnoseUninitializedUse(Sema &S, const VarDecl *VD,
1165 const UninitUse &Use,
1166 bool alwaysReportSelfInit = false) {
1167 if (const DeclRefExpr *DRE = dyn_cast<DeclRefExpr>(Val: Use.getUser())) {
1168 // Inspect the initializer of the variable declaration which is
1169 // being referenced prior to its initialization. We emit
1170 // specialized diagnostics for self-initialization, and we
1171 // specifically avoid warning about self references which take the
1172 // form of:
1173 //
1174 // int x = x;
1175 //
1176 // This is used to indicate to GCC that 'x' is intentionally left
1177 // uninitialized. Proven code paths which access 'x' in
1178 // an uninitialized state after this will still warn.
1179 if (const Expr *Initializer = VD->getInit()) {
1180 if (!alwaysReportSelfInit && DRE == Initializer->IgnoreParenImpCasts())
1181 return false;
1182
1183 ContainsReference CR(S.Context, DRE);
1184 CR.Visit(S: Initializer);
1185 if (CR.doesContainReference()) {
1186 S.Diag(Loc: DRE->getBeginLoc(), DiagID: diag::warn_uninit_self_reference_in_init)
1187 << VD->getDeclName() << VD->getLocation() << DRE->getSourceRange();
1188 return !S.getDiagnostics().isLastDiagnosticIgnored();
1189 }
1190 }
1191
1192 DiagUninitUse(S, VD, Use, IsCapturedByBlock: false);
1193 } else {
1194 const BlockExpr *BE = cast<BlockExpr>(Val: Use.getUser());
1195 if (VD->getType()->isBlockPointerType() && !VD->hasAttr<BlocksAttr>())
1196 S.Diag(Loc: BE->getBeginLoc(),
1197 DiagID: diag::warn_uninit_byref_blockvar_captured_by_block)
1198 << VD->getDeclName()
1199 << VD->getType().getQualifiers().hasObjCLifetime();
1200 else
1201 DiagUninitUse(S, VD, Use, IsCapturedByBlock: true);
1202 }
1203
  // Report where the variable was declared when the use wasn't within
  // the initializer of that declaration and we didn't already suggest
  // an initialization fixit.
1207 if (!SuggestInitializationFixit(S, VD))
1208 S.Diag(Loc: VD->getBeginLoc(), DiagID: diag::note_var_declared_here)
1209 << VD->getDeclName();
1210
1211 return !S.getDiagnostics().isLastDiagnosticIgnored();
1212}
1213
1214namespace {
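/// Collects the [[fallthrough]] (and compatible) annotations in a function
/// body and matches them against the fall-through edges between switch labels
/// found in the CFG.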
1215class FallthroughMapper : public DynamicRecursiveASTVisitor {
1216public:
1217 FallthroughMapper(Sema &S) : FoundSwitchStatements(false), S(S) {
1218 ShouldWalkTypesOfTypeLocs = false;
1219 }
1220
1221 bool foundSwitchStatements() const { return FoundSwitchStatements; }
1222
1223 void markFallthroughVisited(const AttributedStmt *Stmt) {
1224 bool Found = FallthroughStmts.erase(Ptr: Stmt);
1225 assert(Found);
1226 (void)Found;
1227 }
1228
1229 typedef llvm::SmallPtrSet<const AttributedStmt *, 8> AttrStmts;
1230
1231 const AttrStmts &getFallthroughStmts() const { return FallthroughStmts; }
1232
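  // Compute the set of CFG blocks reachable from the entry block. Blocks with
  // case labels are seeded as reachable so that fall-through annotations in
  // them are not flagged as unreachable.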
1233 void fillReachableBlocks(CFG *Cfg) {
1234 assert(ReachableBlocks.empty() && "ReachableBlocks already filled");
1235 std::deque<const CFGBlock *> BlockQueue;
1236
1237 ReachableBlocks.insert(Ptr: &Cfg->getEntry());
1238 BlockQueue.push_back(x: &Cfg->getEntry());
1239 // Mark all case blocks reachable to avoid problems with switching on
1240 // constants, covered enums, etc.
1241 // These blocks can contain fall-through annotations, and we don't want to
1242 // issue a warn_fallthrough_attr_unreachable for them.
1243 for (const auto *B : *Cfg) {
1244 const Stmt *L = B->getLabel();
1245 if (isa_and_nonnull<SwitchCase>(Val: L) && ReachableBlocks.insert(Ptr: B).second)
1246 BlockQueue.push_back(x: B);
1247 }
1248
1249 while (!BlockQueue.empty()) {
1250 const CFGBlock *P = BlockQueue.front();
1251 BlockQueue.pop_front();
1252 for (const CFGBlock *B : P->succs()) {
1253 if (B && ReachableBlocks.insert(Ptr: B).second)
1254 BlockQueue.push_back(x: B);
1255 }
1256 }
1257 }
1258
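  // Returns true if some predecessor of block B falls through into it without
  // a fallthrough annotation; AnnotatedCnt is set to the number of annotated
  // fall-through edges.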
1259 bool checkFallThroughIntoBlock(const CFGBlock &B, int &AnnotatedCnt,
1260 bool IsTemplateInstantiation) {
1261 assert(!ReachableBlocks.empty() && "ReachableBlocks empty");
1262
1263 int UnannotatedCnt = 0;
1264 AnnotatedCnt = 0;
1265
1266 std::deque<const CFGBlock *> BlockQueue(B.pred_begin(), B.pred_end());
1267 while (!BlockQueue.empty()) {
1268 const CFGBlock *P = BlockQueue.front();
1269 BlockQueue.pop_front();
1270 if (!P)
1271 continue;
1272
1273 const Stmt *Term = P->getTerminatorStmt();
1274 if (isa_and_nonnull<SwitchStmt>(Val: Term))
1275 continue; // Switch statement, good.
1276
1277 const SwitchCase *SW = dyn_cast_or_null<SwitchCase>(Val: P->getLabel());
1278 if (SW && SW->getSubStmt() == B.getLabel() && P->begin() == P->end())
1279 continue; // Previous case label has no statements, good.
1280
1281 const LabelStmt *L = dyn_cast_or_null<LabelStmt>(Val: P->getLabel());
1282 if (L && L->getSubStmt() == B.getLabel() && P->begin() == P->end())
        continue; // Case label is preceded by a normal label, good.
1284
1285 if (!ReachableBlocks.count(Ptr: P)) {
1286 for (const CFGElement &Elem : llvm::reverse(C: *P)) {
1287 if (std::optional<CFGStmt> CS = Elem.getAs<CFGStmt>()) {
1288 if (const AttributedStmt *AS = asFallThroughAttr(S: CS->getStmt())) {
1289 // Don't issue a warning for an unreachable fallthrough
1290 // attribute in template instantiations as it may not be
1291 // unreachable in all instantiations of the template.
1292 if (!IsTemplateInstantiation)
1293 S.Diag(Loc: AS->getBeginLoc(),
1294 DiagID: diag::warn_unreachable_fallthrough_attr);
1295 markFallthroughVisited(Stmt: AS);
1296 ++AnnotatedCnt;
1297 break;
1298 }
1299 // Don't care about other unreachable statements.
1300 }
1301 }
1302 // If there are no unreachable statements, this may be a special
1303 // case in CFG:
1304 // case X: {
1305 // A a; // A has a destructor.
1306 // break;
1307 // }
1308 // // <<<< This place is represented by a 'hanging' CFG block.
1309 // case Y:
1310 continue;
1311 }
1312
1313 const Stmt *LastStmt = getLastStmt(B: *P);
1314 if (const AttributedStmt *AS = asFallThroughAttr(S: LastStmt)) {
1315 markFallthroughVisited(Stmt: AS);
1316 ++AnnotatedCnt;
1317 continue; // Fallthrough annotation, good.
1318 }
1319
1320 if (!LastStmt) { // This block contains no executable statements.
1321 // Traverse its predecessors.
1322 std::copy(first: P->pred_begin(), last: P->pred_end(),
1323 result: std::back_inserter(x&: BlockQueue));
1324 continue;
1325 }
1326
1327 ++UnannotatedCnt;
1328 }
1329 return !!UnannotatedCnt;
1330 }
1331
1332 bool VisitAttributedStmt(AttributedStmt *S) override {
1333 if (asFallThroughAttr(S))
1334 FallthroughStmts.insert(Ptr: S);
1335 return true;
1336 }
1337
1338 bool VisitSwitchStmt(SwitchStmt *S) override {
1339 FoundSwitchStatements = true;
1340 return true;
1341 }
1342
1343 // We don't want to traverse local type declarations. We analyze their
1344 // methods separately.
1345 bool TraverseDecl(Decl *D) override { return true; }
1346
1347 // We analyze lambda bodies separately. Skip them here.
1348 bool TraverseLambdaExpr(LambdaExpr *LE) override {
1349 // Traverse the captures, but not the body.
1350 for (const auto C : zip(t: LE->captures(), u: LE->capture_inits()))
1351 TraverseLambdaCapture(LE, C: &std::get<0>(t: C), Init: std::get<1>(t: C));
1352 return true;
1353 }
1354
1355 private:
1356
1357 static const AttributedStmt *asFallThroughAttr(const Stmt *S) {
1358 if (const AttributedStmt *AS = dyn_cast_or_null<AttributedStmt>(Val: S)) {
1359 if (hasSpecificAttr<FallThroughAttr>(container: AS->getAttrs()))
1360 return AS;
1361 }
1362 return nullptr;
1363 }
1364
1365 static const Stmt *getLastStmt(const CFGBlock &B) {
1366 if (const Stmt *Term = B.getTerminatorStmt())
1367 return Term;
1368 for (const CFGElement &Elem : llvm::reverse(C: B))
1369 if (std::optional<CFGStmt> CS = Elem.getAs<CFGStmt>())
1370 return CS->getStmt();
1371 // Workaround to detect a statement thrown out by CFGBuilder:
1372 // case X: {} case Y:
1373 // case X: ; case Y:
1374 if (const SwitchCase *SW = dyn_cast_or_null<SwitchCase>(Val: B.getLabel()))
1375 if (!isa<SwitchCase>(Val: SW->getSubStmt()))
1376 return SW->getSubStmt();
1377
1378 return nullptr;
1379 }
1380
1381 bool FoundSwitchStatements;
1382 AttrStmts FallthroughStmts;
1383 Sema &S;
1384 llvm::SmallPtrSet<const CFGBlock *, 16> ReachableBlocks;
1385};
1386} // anonymous namespace
1387
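// Choose the spelling to suggest for a fallthrough annotation: prefer a macro
// with a matching definition that is already visible at Loc, then fall back to
// the standard attribute or the clang extension depending on the language
// mode.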
1388static StringRef getFallthroughAttrSpelling(Preprocessor &PP,
1389 SourceLocation Loc) {
1390 TokenValue FallthroughTokens[] = {
1391 tok::l_square, tok::l_square,
1392 PP.getIdentifierInfo(Name: "fallthrough"),
1393 tok::r_square, tok::r_square
1394 };
1395
1396 TokenValue ClangFallthroughTokens[] = {
1397 tok::l_square, tok::l_square, PP.getIdentifierInfo(Name: "clang"),
1398 tok::coloncolon, PP.getIdentifierInfo(Name: "fallthrough"),
1399 tok::r_square, tok::r_square
1400 };
1401
1402 bool PreferClangAttr = !PP.getLangOpts().CPlusPlus17 && !PP.getLangOpts().C23;
1403
1404 StringRef MacroName;
1405 if (PreferClangAttr)
1406 MacroName = PP.getLastMacroWithSpelling(Loc, Tokens: ClangFallthroughTokens);
1407 if (MacroName.empty())
1408 MacroName = PP.getLastMacroWithSpelling(Loc, Tokens: FallthroughTokens);
1409 if (MacroName.empty() && !PreferClangAttr)
1410 MacroName = PP.getLastMacroWithSpelling(Loc, Tokens: ClangFallthroughTokens);
1411 if (MacroName.empty()) {
1412 if (!PreferClangAttr)
1413 MacroName = "[[fallthrough]]";
1414 else if (PP.getLangOpts().CPlusPlus)
1415 MacroName = "[[clang::fallthrough]]";
1416 else
1417 MacroName = "__attribute__((fallthrough))";
1418 }
1419 return MacroName;
1420}
1421
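// Diagnose switch labels that are reached by falling through from the previous
// case without an annotation, and fallthrough annotations that are not placed
// directly before a switch label.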
1422static void DiagnoseSwitchLabelsFallthrough(Sema &S, AnalysisDeclContext &AC,
1423 bool PerFunction) {
1424 FallthroughMapper FM(S);
1425 FM.TraverseStmt(S: AC.getBody());
1426
1427 if (!FM.foundSwitchStatements())
1428 return;
1429
1430 if (PerFunction && FM.getFallthroughStmts().empty())
1431 return;
1432
1433 CFG *Cfg = AC.getCFG();
1434
1435 if (!Cfg)
1436 return;
1437
1438 FM.fillReachableBlocks(Cfg);
1439
1440 for (const CFGBlock *B : llvm::reverse(C&: *Cfg)) {
1441 const Stmt *Label = B->getLabel();
1442
1443 if (!isa_and_nonnull<SwitchCase>(Val: Label))
1444 continue;
1445
1446 int AnnotatedCnt;
1447
1448 bool IsTemplateInstantiation = false;
1449 if (const FunctionDecl *Function = dyn_cast<FunctionDecl>(Val: AC.getDecl()))
1450 IsTemplateInstantiation = Function->isTemplateInstantiation();
1451 if (!FM.checkFallThroughIntoBlock(B: *B, AnnotatedCnt,
1452 IsTemplateInstantiation))
1453 continue;
1454
1455 S.Diag(Loc: Label->getBeginLoc(),
1456 DiagID: PerFunction ? diag::warn_unannotated_fallthrough_per_function
1457 : diag::warn_unannotated_fallthrough);
1458
1459 if (!AnnotatedCnt) {
1460 SourceLocation L = Label->getBeginLoc();
1461 if (L.isMacroID())
1462 continue;
1463
1464 const Stmt *Term = B->getTerminatorStmt();
1465 // Skip empty cases.
1466 while (B->empty() && !Term && B->succ_size() == 1) {
1467 B = *B->succ_begin();
1468 Term = B->getTerminatorStmt();
1469 }
1470 if (!(B->empty() && isa_and_nonnull<BreakStmt>(Val: Term))) {
1471 Preprocessor &PP = S.getPreprocessor();
1472 StringRef AnnotationSpelling = getFallthroughAttrSpelling(PP, Loc: L);
1473 SmallString<64> TextToInsert(AnnotationSpelling);
1474 TextToInsert += "; ";
1475 S.Diag(Loc: L, DiagID: diag::note_insert_fallthrough_fixit)
1476 << AnnotationSpelling
1477 << FixItHint::CreateInsertion(InsertionLoc: L, Code: TextToInsert);
1478 }
1479 S.Diag(Loc: L, DiagID: diag::note_insert_break_fixit)
1480 << FixItHint::CreateInsertion(InsertionLoc: L, Code: "break; ");
1481 }
1482 }
1483
1484 for (const auto *F : FM.getFallthroughStmts())
1485 S.Diag(Loc: F->getBeginLoc(), DiagID: diag::err_fallthrough_attr_invalid_placement);
1486}
1487
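// Returns true if S, or any statement it is nested in, is a loop. A 'do'
// statement counts only if its condition is not a known-false constant.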
1488static bool isInLoop(const ASTContext &Ctx, const ParentMap &PM,
1489 const Stmt *S) {
1490 assert(S);
1491
1492 do {
1493 switch (S->getStmtClass()) {
1494 case Stmt::ForStmtClass:
1495 case Stmt::WhileStmtClass:
1496 case Stmt::CXXForRangeStmtClass:
1497 case Stmt::ObjCForCollectionStmtClass:
1498 return true;
1499 case Stmt::DoStmtClass: {
1500 Expr::EvalResult Result;
1501 if (!cast<DoStmt>(Val: S)->getCond()->EvaluateAsInt(Result, Ctx))
1502 return true;
1503 return Result.Val.getInt().getBoolValue();
1504 }
1505 default:
1506 break;
1507 }
1508 } while ((S = PM.getParent(S)));
1509
1510 return false;
1511}
1512
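// Diagnose repeated reads of the same weak object within a function-like body:
// the first read gets a warning and every other access gets a note. A single
// read followed only by writes is warned about only when it occurs inside a
// loop and the base object is not a local variable.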
1513static void diagnoseRepeatedUseOfWeak(Sema &S,
1514 const sema::FunctionScopeInfo *CurFn,
1515 const Decl *D,
1516 const ParentMap &PM) {
1517 typedef sema::FunctionScopeInfo::WeakObjectProfileTy WeakObjectProfileTy;
1518 typedef sema::FunctionScopeInfo::WeakObjectUseMap WeakObjectUseMap;
1519 typedef sema::FunctionScopeInfo::WeakUseVector WeakUseVector;
1520 typedef std::pair<const Stmt *, WeakObjectUseMap::const_iterator>
1521 StmtUsesPair;
1522
1523 ASTContext &Ctx = S.getASTContext();
1524
1525 const WeakObjectUseMap &WeakMap = CurFn->getWeakObjectUses();
1526
1527 // Extract all weak objects that are referenced more than once.
1528 SmallVector<StmtUsesPair, 8> UsesByStmt;
1529 for (WeakObjectUseMap::const_iterator I = WeakMap.begin(), E = WeakMap.end();
1530 I != E; ++I) {
1531 const WeakUseVector &Uses = I->second;
1532
1533 // Find the first read of the weak object.
1534 WeakUseVector::const_iterator UI = Uses.begin(), UE = Uses.end();
1535 for ( ; UI != UE; ++UI) {
1536 if (UI->isUnsafe())
1537 break;
1538 }
1539
1540 // If there were only writes to this object, don't warn.
1541 if (UI == UE)
1542 continue;
1543
1544 // If there was only one read, followed by any number of writes, and the
1545 // read is not within a loop, don't warn. Additionally, don't warn in a
1546 // loop if the base object is a local variable -- local variables are often
1547 // changed in loops.
1548 if (UI == Uses.begin()) {
1549 WeakUseVector::const_iterator UI2 = UI;
1550 for (++UI2; UI2 != UE; ++UI2)
1551 if (UI2->isUnsafe())
1552 break;
1553
1554 if (UI2 == UE) {
1555 if (!isInLoop(Ctx, PM, S: UI->getUseExpr()))
1556 continue;
1557
1558 const WeakObjectProfileTy &Profile = I->first;
1559 if (!Profile.isExactProfile())
1560 continue;
1561
1562 const NamedDecl *Base = Profile.getBase();
1563 if (!Base)
1564 Base = Profile.getProperty();
1565 assert(Base && "A profile always has a base or property.");
1566
1567 if (const VarDecl *BaseVar = dyn_cast<VarDecl>(Val: Base))
1568 if (BaseVar->hasLocalStorage() && !isa<ParmVarDecl>(Val: Base))
1569 continue;
1570 }
1571 }
1572
1573 UsesByStmt.push_back(Elt: StmtUsesPair(UI->getUseExpr(), I));
1574 }
1575
1576 if (UsesByStmt.empty())
1577 return;
1578
1579 // Sort by first use so that we emit the warnings in a deterministic order.
1580 SourceManager &SM = S.getSourceManager();
1581 llvm::sort(C&: UsesByStmt,
1582 Comp: [&SM](const StmtUsesPair &LHS, const StmtUsesPair &RHS) {
1583 return SM.isBeforeInTranslationUnit(LHS: LHS.first->getBeginLoc(),
1584 RHS: RHS.first->getBeginLoc());
1585 });
1586
1587 // Classify the current code body for better warning text.
1588 // This enum should stay in sync with the cases in
1589 // warn_arc_repeated_use_of_weak and warn_arc_possible_repeated_use_of_weak.
1590 // FIXME: Should we use a common classification enum and the same set of
1591 // possibilities all throughout Sema?
1592 enum {
1593 Function,
1594 Method,
1595 Block,
1596 Lambda
1597 } FunctionKind;
1598
1599 if (isa<sema::BlockScopeInfo>(Val: CurFn))
1600 FunctionKind = Block;
1601 else if (isa<sema::LambdaScopeInfo>(Val: CurFn))
1602 FunctionKind = Lambda;
1603 else if (isa<ObjCMethodDecl>(Val: D))
1604 FunctionKind = Method;
1605 else
1606 FunctionKind = Function;
1607
1608 // Iterate through the sorted problems and emit warnings for each.
1609 for (const auto &P : UsesByStmt) {
1610 const Stmt *FirstRead = P.first;
1611 const WeakObjectProfileTy &Key = P.second->first;
1612 const WeakUseVector &Uses = P.second->second;
1613
1614 // For complicated expressions like 'a.b.c' and 'x.b.c', WeakObjectProfileTy
1615 // may not contain enough information to determine that these are different
1616 // properties. We can only be 100% sure of a repeated use in certain cases,
1617 // and we adjust the diagnostic kind accordingly so that the less certain
1618 // case can be turned off if it is too noisy.
1619 unsigned DiagKind;
1620 if (Key.isExactProfile())
1621 DiagKind = diag::warn_arc_repeated_use_of_weak;
1622 else
1623 DiagKind = diag::warn_arc_possible_repeated_use_of_weak;
1624
1625 // Classify the weak object being accessed for better warning text.
1626 // This enum should stay in sync with the cases in
1627 // warn_arc_repeated_use_of_weak and warn_arc_possible_repeated_use_of_weak.
1628 enum {
1629 Variable,
1630 Property,
1631 ImplicitProperty,
1632 Ivar
1633 } ObjectKind;
1634
1635 const NamedDecl *KeyProp = Key.getProperty();
1636 if (isa<VarDecl>(Val: KeyProp))
1637 ObjectKind = Variable;
1638 else if (isa<ObjCPropertyDecl>(Val: KeyProp))
1639 ObjectKind = Property;
1640 else if (isa<ObjCMethodDecl>(Val: KeyProp))
1641 ObjectKind = ImplicitProperty;
1642 else if (isa<ObjCIvarDecl>(Val: KeyProp))
1643 ObjectKind = Ivar;
1644 else
1645 llvm_unreachable("Unexpected weak object kind!");
1646
1647 // Do not warn about IBOutlet weak property receivers being set to null
1648 // since they are typically only used from the main thread.
1649 if (const ObjCPropertyDecl *Prop = dyn_cast<ObjCPropertyDecl>(Val: KeyProp))
1650 if (Prop->hasAttr<IBOutletAttr>())
1651 continue;
1652
1653 // Show the first time the object was read.
1654 S.Diag(Loc: FirstRead->getBeginLoc(), DiagID: DiagKind)
1655 << int(ObjectKind) << KeyProp << int(FunctionKind)
1656 << FirstRead->getSourceRange();
1657
1658 // Print all the other accesses as notes.
1659 for (const auto &Use : Uses) {
1660 if (Use.getUseExpr() == FirstRead)
1661 continue;
1662 S.Diag(Loc: Use.getUseExpr()->getBeginLoc(),
1663 DiagID: diag::note_arc_weak_also_accessed_here)
1664 << Use.getUseExpr()->getSourceRange();
1665 }
1666 }
1667}
1668
1669namespace clang {
1670namespace {
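// A buffered diagnostic (with optional notes) keyed by its source location.
// The handlers below collect these so warnings can be sorted and emitted in a
// deterministic order.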
1671typedef SmallVector<PartialDiagnosticAt, 1> OptionalNotes;
1672typedef std::pair<PartialDiagnosticAt, OptionalNotes> DelayedDiag;
1673typedef std::list<DelayedDiag> DiagList;
1674
1675struct SortDiagBySourceLocation {
1676 SourceManager &SM;
1677 SortDiagBySourceLocation(SourceManager &SM) : SM(SM) {}
1678
1679 bool operator()(const DelayedDiag &left, const DelayedDiag &right) {
1680 // Although this call will be slow, this is only called when outputting
1681 // multiple warnings.
1682 return SM.isBeforeInTranslationUnit(LHS: left.first.first, RHS: right.first.first);
1683 }
1684};
1685} // anonymous namespace
1686} // namespace clang
1687
1688namespace {
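// Collects the uses of uninitialized variables reported by the CFG-based
// analysis, grouped per variable, and emits the corresponding diagnostics when
// flushed (see diagnoseUnitializedVar below).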
1689class UninitValsDiagReporter : public UninitVariablesHandler {
1690 Sema &S;
1691 typedef SmallVector<UninitUse, 2> UsesVec;
1692 typedef llvm::PointerIntPair<UsesVec *, 1, bool> MappedType;
1693 // Prefer using MapVector to DenseMap, so that iteration order will be
1694 // the same as insertion order. This is needed to obtain a deterministic
1695 // order of diagnostics when calling flushDiagnostics().
1696 typedef llvm::MapVector<const VarDecl *, MappedType> UsesMap;
1697 UsesMap uses;
1698
1699public:
1700 UninitValsDiagReporter(Sema &S) : S(S) {}
1701 ~UninitValsDiagReporter() override { flushDiagnostics(); }
1702
1703 MappedType &getUses(const VarDecl *vd) {
1704 MappedType &V = uses[vd];
1705 if (!V.getPointer())
1706 V.setPointer(new UsesVec());
1707 return V;
1708 }
1709
1710 void handleUseOfUninitVariable(const VarDecl *vd,
1711 const UninitUse &use) override {
1712 getUses(vd).getPointer()->push_back(Elt: use);
1713 }
1714
1715 void handleSelfInit(const VarDecl *vd) override { getUses(vd).setInt(true); }
1716
1717 void flushDiagnostics() {
1718 for (const auto &P : uses) {
1719 const VarDecl *vd = P.first;
1720 const MappedType &V = P.second;
1721
1722 UsesVec *vec = V.getPointer();
1723 bool hasSelfInit = V.getInt();
1724
1725 diagnoseUnitializedVar(vd, hasSelfInit, vec);
1726
1727 // Release the uses vector.
1728 delete vec;
1729 }
1730
1731 uses.clear();
1732 }
1733
1734private:
1735 static bool hasAlwaysUninitializedUse(const UsesVec* vec) {
1736 return llvm::any_of(Range: *vec, P: [](const UninitUse &U) {
1737 return U.getKind() == UninitUse::Always ||
1738 U.getKind() == UninitUse::AfterCall ||
1739 U.getKind() == UninitUse::AfterDecl;
1740 });
1741 }
1742
1743 // Print the diagnostic for the variable. We try to warn only on the first
1744 // point at which a variable is used uninitialized. After the first
1745 // diagnostic is printed, further diagnostics for this variable are skipped.
1746 void diagnoseUnitializedVar(const VarDecl *vd, bool hasSelfInit,
1747 UsesVec *vec) {
1748 // Specially handle the case where we have uses of an uninitialized
1749 // variable, but the root cause is an idiomatic self-init. We want
1750 // to report the diagnostic at the self-init since that is the root cause.
1751 if (hasSelfInit && hasAlwaysUninitializedUse(vec)) {
1752 if (DiagnoseUninitializedUse(S, VD: vd,
1753 Use: UninitUse(vd->getInit()->IgnoreParenCasts(),
1754 /*isAlwaysUninit=*/true),
1755 /*alwaysReportSelfInit=*/true))
1756 return;
1757 }
1758
1759 // Sort the uses by their SourceLocations. While not strictly
1760 // guaranteed to produce them in line/column order, this will provide
1761 // a stable ordering.
1762 llvm::sort(C&: *vec, Comp: [](const UninitUse &a, const UninitUse &b) {
1763 // Prefer the direct use of an uninitialized variable over its use via
1764 // constant reference or pointer.
1765 if (a.isConstRefOrPtrUse() != b.isConstRefOrPtrUse())
1766 return b.isConstRefOrPtrUse();
1767 // Prefer a more confident report over a less confident one.
1768 if (a.getKind() != b.getKind())
1769 return a.getKind() > b.getKind();
1770 return a.getUser()->getBeginLoc() < b.getUser()->getBeginLoc();
1771 });
1772
1773 for (const auto &U : *vec) {
1774 if (U.isConstRefUse()) {
1775 if (DiagnoseUninitializedConstRefUse(S, VD: vd, Use: U))
1776 return;
1777 } else if (U.isConstPtrUse()) {
1778 if (DiagnoseUninitializedConstPtrUse(S, VD: vd, Use: U))
1779 return;
1780 } else {
1781 // If we have self-init, downgrade all uses to 'may be uninitialized'.
1782 UninitUse Use = hasSelfInit ? UninitUse(U.getUser(), false) : U;
1783 if (DiagnoseUninitializedUse(S, VD: vd, Use))
1784 return;
1785 }
1786 }
1787 }
1788};
1789
1790/// Inter-procedural data for the called-once checker.
1791class CalledOnceInterProceduralData {
1792public:
1793 // Add the delayed warning for the given block.
1794 void addDelayedWarning(const BlockDecl *Block,
1795 PartialDiagnosticAt &&Warning) {
1796 DelayedBlockWarnings[Block].emplace_back(Args: std::move(Warning));
1797 }
1798 // Report all of the warnings we've gathered for the given block.
1799 void flushWarnings(const BlockDecl *Block, Sema &S) {
1800 for (const PartialDiagnosticAt &Delayed : DelayedBlockWarnings[Block])
1801 S.Diag(Loc: Delayed.first, PD: Delayed.second);
1802
1803 discardWarnings(Block);
1804 }
1805 // Discard all of the warnings we've gathered for the given block.
1806 void discardWarnings(const BlockDecl *Block) {
1807 DelayedBlockWarnings.erase(Val: Block);
1808 }
1809
1810private:
1811 using DelayedDiagnostics = SmallVector<PartialDiagnosticAt, 2>;
1812 llvm::DenseMap<const BlockDecl *, DelayedDiagnostics> DelayedBlockWarnings;
1813};
1814
1815class CalledOnceCheckReporter : public CalledOnceCheckHandler {
1816public:
1817 CalledOnceCheckReporter(Sema &S, CalledOnceInterProceduralData &Data)
1818 : S(S), Data(Data) {}
1819 void handleDoubleCall(const ParmVarDecl *Parameter, const Expr *Call,
1820 const Expr *PrevCall, bool IsCompletionHandler,
1821 bool Poised) override {
1822 auto DiagToReport = IsCompletionHandler
1823 ? diag::warn_completion_handler_called_twice
1824 : diag::warn_called_once_gets_called_twice;
1825 S.Diag(Loc: Call->getBeginLoc(), DiagID: DiagToReport) << Parameter;
1826 S.Diag(Loc: PrevCall->getBeginLoc(), DiagID: diag::note_called_once_gets_called_twice)
1827 << Poised;
1828 }
1829
1830 void handleNeverCalled(const ParmVarDecl *Parameter,
1831 bool IsCompletionHandler) override {
1832 auto DiagToReport = IsCompletionHandler
1833 ? diag::warn_completion_handler_never_called
1834 : diag::warn_called_once_never_called;
1835 S.Diag(Loc: Parameter->getBeginLoc(), DiagID: DiagToReport)
1836 << Parameter << /* Captured */ false;
1837 }
1838
1839 void handleNeverCalled(const ParmVarDecl *Parameter, const Decl *Function,
1840 const Stmt *Where, NeverCalledReason Reason,
1841 bool IsCalledDirectly,
1842 bool IsCompletionHandler) override {
1843 auto DiagToReport = IsCompletionHandler
1844 ? diag::warn_completion_handler_never_called_when
1845 : diag::warn_called_once_never_called_when;
1846 PartialDiagnosticAt Warning(Where->getBeginLoc(), S.PDiag(DiagID: DiagToReport)
1847 << Parameter
1848 << IsCalledDirectly
1849 << (unsigned)Reason);
1850
1851 if (const auto *Block = dyn_cast<BlockDecl>(Val: Function)) {
1852 // We shouldn't report these warnings on blocks immediately
1853 Data.addDelayedWarning(Block, Warning: std::move(Warning));
1854 } else {
1855 S.Diag(Loc: Warning.first, PD: Warning.second);
1856 }
1857 }
1858
1859 void handleCapturedNeverCalled(const ParmVarDecl *Parameter,
1860 const Decl *Where,
1861 bool IsCompletionHandler) override {
1862 auto DiagToReport = IsCompletionHandler
1863 ? diag::warn_completion_handler_never_called
1864 : diag::warn_called_once_never_called;
1865 S.Diag(Loc: Where->getBeginLoc(), DiagID: DiagToReport)
1866 << Parameter << /* Captured */ true;
1867 }
1868
1869 void
1870 handleBlockThatIsGuaranteedToBeCalledOnce(const BlockDecl *Block) override {
1871 Data.flushWarnings(Block, S);
1872 }
1873
1874 void handleBlockWithNoGuarantees(const BlockDecl *Block) override {
1875 Data.discardWarnings(Block);
1876 }
1877
1878private:
1879 Sema &S;
1880 CalledOnceInterProceduralData &Data;
1881};
1882
1883constexpr unsigned CalledOnceWarnings[] = {
1884 diag::warn_called_once_never_called,
1885 diag::warn_called_once_never_called_when,
1886 diag::warn_called_once_gets_called_twice};
1887
1888constexpr unsigned CompletionHandlerWarnings[]{
1889 diag::warn_completion_handler_never_called,
1890 diag::warn_completion_handler_never_called_when,
1891 diag::warn_completion_handler_called_twice};
1892
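// Returns true if at least one of the given diagnostics is enabled at the
// given location, i.e. running the called-once analysis could produce visible
// output there.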
1893bool shouldAnalyzeCalledOnceImpl(llvm::ArrayRef<unsigned> DiagIDs,
1894 const DiagnosticsEngine &Diags,
1895 SourceLocation At) {
1896 return llvm::any_of(Range&: DiagIDs, P: [&Diags, At](unsigned DiagID) {
1897 return !Diags.isIgnored(DiagID, Loc: At);
1898 });
1899}
1900
1901bool shouldAnalyzeCalledOnceConventions(const DiagnosticsEngine &Diags,
1902 SourceLocation At) {
1903 return shouldAnalyzeCalledOnceImpl(DiagIDs: CompletionHandlerWarnings, Diags, At);
1904}
1905
1906bool shouldAnalyzeCalledOnceParameters(const DiagnosticsEngine &Diags,
1907 SourceLocation At) {
1908 return shouldAnalyzeCalledOnceImpl(DiagIDs: CalledOnceWarnings, Diags, At) ||
1909 shouldAnalyzeCalledOnceConventions(Diags, At);
1910}
1911} // anonymous namespace
1912
1913//===----------------------------------------------------------------------===//
1914// -Wthread-safety
1915//===----------------------------------------------------------------------===//
1916namespace clang {
1917namespace threadSafety {
1918namespace {
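// Buffers thread-safety diagnostics as the analysis produces them and emits
// them sorted by source location via emitDiagnostics(); in verbose mode each
// warning also carries a note pointing at the enclosing function.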
1919class ThreadSafetyReporter : public clang::threadSafety::ThreadSafetyHandler {
1920 Sema &S;
1921 DiagList Warnings;
1922 SourceLocation FunLocation, FunEndLocation;
1923
1924 const FunctionDecl *CurrentFunction;
1925 bool Verbose;
1926
1927 OptionalNotes getNotes() const {
1928 if (Verbose && CurrentFunction) {
1929 PartialDiagnosticAt FNote(CurrentFunction->getBody()->getBeginLoc(),
1930 S.PDiag(DiagID: diag::note_thread_warning_in_fun)
1931 << CurrentFunction);
1932 return OptionalNotes(1, FNote);
1933 }
1934 return OptionalNotes();
1935 }
1936
1937 OptionalNotes getNotes(const PartialDiagnosticAt &Note) const {
1938 OptionalNotes ONS(1, Note);
1939 if (Verbose && CurrentFunction) {
1940 PartialDiagnosticAt FNote(CurrentFunction->getBody()->getBeginLoc(),
1941 S.PDiag(DiagID: diag::note_thread_warning_in_fun)
1942 << CurrentFunction);
1943 ONS.push_back(Elt: std::move(FNote));
1944 }
1945 return ONS;
1946 }
1947
1948 OptionalNotes getNotes(const PartialDiagnosticAt &Note1,
1949 const PartialDiagnosticAt &Note2) const {
1950 OptionalNotes ONS;
1951 ONS.push_back(Elt: Note1);
1952 ONS.push_back(Elt: Note2);
1953 if (Verbose && CurrentFunction) {
1954 PartialDiagnosticAt FNote(CurrentFunction->getBody()->getBeginLoc(),
1955 S.PDiag(DiagID: diag::note_thread_warning_in_fun)
1956 << CurrentFunction);
1957 ONS.push_back(Elt: std::move(FNote));
1958 }
1959 return ONS;
1960 }
1961
1962 OptionalNotes makeLockedHereNote(SourceLocation LocLocked, StringRef Kind) {
1963 return LocLocked.isValid()
1964 ? getNotes(Note: PartialDiagnosticAt(
1965 LocLocked, S.PDiag(DiagID: diag::note_locked_here) << Kind))
1966 : getNotes();
1967 }
1968
1969 OptionalNotes makeUnlockedHereNote(SourceLocation LocUnlocked,
1970 StringRef Kind) {
1971 return LocUnlocked.isValid()
1972 ? getNotes(Note: PartialDiagnosticAt(
1973 LocUnlocked, S.PDiag(DiagID: diag::note_unlocked_here) << Kind))
1974 : getNotes();
1975 }
1976
1977 OptionalNotes makeManagedMismatchNoteForParam(SourceLocation DeclLoc) {
1978 return DeclLoc.isValid()
1979 ? getNotes(Note: PartialDiagnosticAt(
1980 DeclLoc,
1981 S.PDiag(DiagID: diag::note_managed_mismatch_here_for_param)))
1982 : getNotes();
1983 }
1984
1985 public:
1986 ThreadSafetyReporter(Sema &S, SourceLocation FL, SourceLocation FEL)
1987 : S(S), FunLocation(FL), FunEndLocation(FEL),
1988 CurrentFunction(nullptr), Verbose(false) {}
1989
1990 void setVerbose(bool b) { Verbose = b; }
1991
1992  /// Emit all buffered diagnostics in order of source location.
1993 /// We need to output diagnostics produced while iterating through
1994 /// the lockset in deterministic order, so this function orders diagnostics
1995 /// and outputs them.
1996 void emitDiagnostics() {
1997 Warnings.sort(comp: SortDiagBySourceLocation(S.getSourceManager()));
1998 for (const auto &Diag : Warnings) {
1999 S.Diag(Loc: Diag.first.first, PD: Diag.first.second);
2000 for (const auto &Note : Diag.second)
2001 S.Diag(Loc: Note.first, PD: Note.second);
2002 }
2003 }
2004
2005 void handleUnmatchedUnderlyingMutexes(SourceLocation Loc, SourceLocation DLoc,
2006 Name scopeName, StringRef Kind,
2007 Name expected, Name actual) override {
2008 PartialDiagnosticAt Warning(Loc,
2009 S.PDiag(DiagID: diag::warn_unmatched_underlying_mutexes)
2010 << Kind << scopeName << expected << actual);
2011 Warnings.emplace_back(args: std::move(Warning),
2012 args: makeManagedMismatchNoteForParam(DeclLoc: DLoc));
2013 }
2014
2015 void handleExpectMoreUnderlyingMutexes(SourceLocation Loc,
2016 SourceLocation DLoc, Name scopeName,
2017 StringRef Kind,
2018 Name expected) override {
2019 PartialDiagnosticAt Warning(
2020 Loc, S.PDiag(DiagID: diag::warn_expect_more_underlying_mutexes)
2021 << Kind << scopeName << expected);
2022 Warnings.emplace_back(args: std::move(Warning),
2023 args: makeManagedMismatchNoteForParam(DeclLoc: DLoc));
2024 }
2025
2026 void handleExpectFewerUnderlyingMutexes(SourceLocation Loc,
2027 SourceLocation DLoc, Name scopeName,
2028 StringRef Kind,
2029 Name actual) override {
2030 PartialDiagnosticAt Warning(
2031 Loc, S.PDiag(DiagID: diag::warn_expect_fewer_underlying_mutexes)
2032 << Kind << scopeName << actual);
2033 Warnings.emplace_back(args: std::move(Warning),
2034 args: makeManagedMismatchNoteForParam(DeclLoc: DLoc));
2035 }
2036
2037 void handleInvalidLockExp(SourceLocation Loc) override {
2038 PartialDiagnosticAt Warning(Loc, S.PDiag(DiagID: diag::warn_cannot_resolve_lock)
2039 << Loc);
2040 Warnings.emplace_back(args: std::move(Warning), args: getNotes());
2041 }
2042
2043 void handleUnmatchedUnlock(StringRef Kind, Name LockName, SourceLocation Loc,
2044 SourceLocation LocPreviousUnlock) override {
2045 if (Loc.isInvalid())
2046 Loc = FunLocation;
2047 PartialDiagnosticAt Warning(Loc, S.PDiag(DiagID: diag::warn_unlock_but_no_lock)
2048 << Kind << LockName);
2049 Warnings.emplace_back(args: std::move(Warning),
2050 args: makeUnlockedHereNote(LocUnlocked: LocPreviousUnlock, Kind));
2051 }
2052
2053 void handleIncorrectUnlockKind(StringRef Kind, Name LockName,
2054 LockKind Expected, LockKind Received,
2055 SourceLocation LocLocked,
2056 SourceLocation LocUnlock) override {
2057 if (LocUnlock.isInvalid())
2058 LocUnlock = FunLocation;
2059 PartialDiagnosticAt Warning(
2060 LocUnlock, S.PDiag(DiagID: diag::warn_unlock_kind_mismatch)
2061 << Kind << LockName << Received << Expected);
2062 Warnings.emplace_back(args: std::move(Warning),
2063 args: makeLockedHereNote(LocLocked, Kind));
2064 }
2065
2066 void handleDoubleLock(StringRef Kind, Name LockName, SourceLocation LocLocked,
2067 SourceLocation LocDoubleLock) override {
2068 if (LocDoubleLock.isInvalid())
2069 LocDoubleLock = FunLocation;
2070 PartialDiagnosticAt Warning(LocDoubleLock, S.PDiag(DiagID: diag::warn_double_lock)
2071 << Kind << LockName);
2072 Warnings.emplace_back(args: std::move(Warning),
2073 args: makeLockedHereNote(LocLocked, Kind));
2074 }
2075
2076 void handleMutexHeldEndOfScope(StringRef Kind, Name LockName,
2077 SourceLocation LocLocked,
2078 SourceLocation LocEndOfScope,
2079 LockErrorKind LEK,
2080 bool ReentrancyMismatch) override {
2081 unsigned DiagID = 0;
2082 switch (LEK) {
2083 case LEK_LockedSomePredecessors:
2084 DiagID = diag::warn_lock_some_predecessors;
2085 break;
2086 case LEK_LockedSomeLoopIterations:
2087 DiagID = diag::warn_expecting_lock_held_on_loop;
2088 break;
2089 case LEK_LockedAtEndOfFunction:
2090 DiagID = diag::warn_no_unlock;
2091 break;
2092 case LEK_NotLockedAtEndOfFunction:
2093 DiagID = diag::warn_expecting_locked;
2094 break;
2095 }
2096 if (LocEndOfScope.isInvalid())
2097 LocEndOfScope = FunEndLocation;
2098
2099 PartialDiagnosticAt Warning(LocEndOfScope, S.PDiag(DiagID)
2100 << Kind << LockName
2101 << ReentrancyMismatch);
2102 Warnings.emplace_back(args: std::move(Warning),
2103 args: makeLockedHereNote(LocLocked, Kind));
2104 }
2105
2106 void handleExclusiveAndShared(StringRef Kind, Name LockName,
2107 SourceLocation Loc1,
2108 SourceLocation Loc2) override {
2109 PartialDiagnosticAt Warning(Loc1,
2110 S.PDiag(DiagID: diag::warn_lock_exclusive_and_shared)
2111 << Kind << LockName);
2112 PartialDiagnosticAt Note(Loc2, S.PDiag(DiagID: diag::note_lock_exclusive_and_shared)
2113 << Kind << LockName);
2114 Warnings.emplace_back(args: std::move(Warning), args: getNotes(Note));
2115 }
2116
2117 void handleNoMutexHeld(const NamedDecl *D, ProtectedOperationKind POK,
2118 AccessKind AK, SourceLocation Loc) override {
2119 unsigned DiagID = 0;
2120 switch (POK) {
2121 case POK_VarAccess:
2122 case POK_PassByRef:
2123 case POK_ReturnByRef:
2124 case POK_PassPointer:
2125 case POK_ReturnPointer:
2126 DiagID = diag::warn_variable_requires_any_lock;
2127 break;
2128 case POK_VarDereference:
2129 case POK_PtPassByRef:
2130 case POK_PtReturnByRef:
2131 case POK_PtPassPointer:
2132 case POK_PtReturnPointer:
2133 DiagID = diag::warn_var_deref_requires_any_lock;
2134 break;
2135 case POK_FunctionCall:
2136 llvm_unreachable("Only works for variables");
2137 break;
2138 }
2139 PartialDiagnosticAt Warning(Loc, S.PDiag(DiagID)
2140 << D << getLockKindFromAccessKind(AK));
2141 Warnings.emplace_back(args: std::move(Warning), args: getNotes());
2142 }
2143
2144 void handleMutexNotHeld(StringRef Kind, const NamedDecl *D,
2145 ProtectedOperationKind POK, Name LockName,
2146 LockKind LK, SourceLocation Loc,
2147 Name *PossibleMatch) override {
2148 unsigned DiagID = 0;
2149 if (PossibleMatch) {
2150 switch (POK) {
2151 case POK_VarAccess:
2152 DiagID = diag::warn_variable_requires_lock_precise;
2153 break;
2154 case POK_VarDereference:
2155 DiagID = diag::warn_var_deref_requires_lock_precise;
2156 break;
2157 case POK_FunctionCall:
2158 DiagID = diag::warn_fun_requires_lock_precise;
2159 break;
2160 case POK_PassByRef:
2161 DiagID = diag::warn_guarded_pass_by_reference;
2162 break;
2163 case POK_PtPassByRef:
2164 DiagID = diag::warn_pt_guarded_pass_by_reference;
2165 break;
2166 case POK_ReturnByRef:
2167 DiagID = diag::warn_guarded_return_by_reference;
2168 break;
2169 case POK_PtReturnByRef:
2170 DiagID = diag::warn_pt_guarded_return_by_reference;
2171 break;
2172 case POK_PassPointer:
2173 DiagID = diag::warn_guarded_pass_pointer;
2174 break;
2175 case POK_PtPassPointer:
2176 DiagID = diag::warn_pt_guarded_pass_pointer;
2177 break;
2178 case POK_ReturnPointer:
2179 DiagID = diag::warn_guarded_return_pointer;
2180 break;
2181 case POK_PtReturnPointer:
2182 DiagID = diag::warn_pt_guarded_return_pointer;
2183 break;
2184 }
2185 PartialDiagnosticAt Warning(Loc, S.PDiag(DiagID) << Kind
2186 << D
2187 << LockName << LK);
2188 PartialDiagnosticAt Note(Loc, S.PDiag(DiagID: diag::note_found_mutex_near_match)
2189 << *PossibleMatch);
2190 if (Verbose && POK == POK_VarAccess) {
2191 PartialDiagnosticAt VNote(D->getLocation(),
2192 S.PDiag(DiagID: diag::note_guarded_by_declared_here)
2193 << D->getDeclName());
2194 Warnings.emplace_back(args: std::move(Warning), args: getNotes(Note1: Note, Note2: VNote));
2195 } else
2196 Warnings.emplace_back(args: std::move(Warning), args: getNotes(Note));
2197 } else {
2198 switch (POK) {
2199 case POK_VarAccess:
2200 DiagID = diag::warn_variable_requires_lock;
2201 break;
2202 case POK_VarDereference:
2203 DiagID = diag::warn_var_deref_requires_lock;
2204 break;
2205 case POK_FunctionCall:
2206 DiagID = diag::warn_fun_requires_lock;
2207 break;
2208 case POK_PassByRef:
2209 DiagID = diag::warn_guarded_pass_by_reference;
2210 break;
2211 case POK_PtPassByRef:
2212 DiagID = diag::warn_pt_guarded_pass_by_reference;
2213 break;
2214 case POK_ReturnByRef:
2215 DiagID = diag::warn_guarded_return_by_reference;
2216 break;
2217 case POK_PtReturnByRef:
2218 DiagID = diag::warn_pt_guarded_return_by_reference;
2219 break;
2220 case POK_PassPointer:
2221 DiagID = diag::warn_guarded_pass_pointer;
2222 break;
2223 case POK_PtPassPointer:
2224 DiagID = diag::warn_pt_guarded_pass_pointer;
2225 break;
2226 case POK_ReturnPointer:
2227 DiagID = diag::warn_guarded_return_pointer;
2228 break;
2229 case POK_PtReturnPointer:
2230 DiagID = diag::warn_pt_guarded_return_pointer;
2231 break;
2232 }
2233 PartialDiagnosticAt Warning(Loc, S.PDiag(DiagID) << Kind
2234 << D
2235 << LockName << LK);
2236 if (Verbose && POK == POK_VarAccess) {
2237 PartialDiagnosticAt Note(D->getLocation(),
2238 S.PDiag(DiagID: diag::note_guarded_by_declared_here));
2239 Warnings.emplace_back(args: std::move(Warning), args: getNotes(Note));
2240 } else
2241 Warnings.emplace_back(args: std::move(Warning), args: getNotes());
2242 }
2243 }
2244
2245 void handleNegativeNotHeld(StringRef Kind, Name LockName, Name Neg,
2246 SourceLocation Loc) override {
2247 PartialDiagnosticAt Warning(Loc,
2248 S.PDiag(DiagID: diag::warn_acquire_requires_negative_cap)
2249 << Kind << LockName << Neg);
2250 Warnings.emplace_back(args: std::move(Warning), args: getNotes());
2251 }
2252
2253 void handleNegativeNotHeld(const NamedDecl *D, Name LockName,
2254 SourceLocation Loc) override {
2255 PartialDiagnosticAt Warning(
2256 Loc, S.PDiag(DiagID: diag::warn_fun_requires_negative_cap) << D << LockName);
2257 Warnings.emplace_back(args: std::move(Warning), args: getNotes());
2258 }
2259
2260 void handleFunExcludesLock(StringRef Kind, Name FunName, Name LockName,
2261 SourceLocation Loc) override {
2262 PartialDiagnosticAt Warning(Loc, S.PDiag(DiagID: diag::warn_fun_excludes_mutex)
2263 << Kind << FunName << LockName);
2264 Warnings.emplace_back(args: std::move(Warning), args: getNotes());
2265 }
2266
2267 void handleLockAcquiredBefore(StringRef Kind, Name L1Name, Name L2Name,
2268 SourceLocation Loc) override {
2269 PartialDiagnosticAt Warning(Loc,
2270 S.PDiag(DiagID: diag::warn_acquired_before) << Kind << L1Name << L2Name);
2271 Warnings.emplace_back(args: std::move(Warning), args: getNotes());
2272 }
2273
2274 void handleBeforeAfterCycle(Name L1Name, SourceLocation Loc) override {
2275 PartialDiagnosticAt Warning(Loc,
2276 S.PDiag(DiagID: diag::warn_acquired_before_after_cycle) << L1Name);
2277 Warnings.emplace_back(args: std::move(Warning), args: getNotes());
2278 }
2279
2280 void enterFunction(const FunctionDecl* FD) override {
2281 CurrentFunction = FD;
2282 }
2283
2284 void leaveFunction(const FunctionDecl* FD) override {
2285 CurrentFunction = nullptr;
2286 }
2287};
2288} // anonymous namespace
2289} // namespace threadSafety
2290} // namespace clang
2291
2292//===----------------------------------------------------------------------===//
2293// -Wconsumed
2294//===----------------------------------------------------------------------===//
2295
2296namespace clang {
2297namespace consumed {
2298namespace {
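// Buffers -Wconsumed diagnostics and emits them in source order once the
// consumed-objects analysis finishes.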
2299class ConsumedWarningsHandler : public ConsumedWarningsHandlerBase {
2300
2301 Sema &S;
2302 DiagList Warnings;
2303
2304public:
2305
2306 ConsumedWarningsHandler(Sema &S) : S(S) {}
2307
2308 void emitDiagnostics() override {
2309 Warnings.sort(comp: SortDiagBySourceLocation(S.getSourceManager()));
2310 for (const auto &Diag : Warnings) {
2311 S.Diag(Loc: Diag.first.first, PD: Diag.first.second);
2312 for (const auto &Note : Diag.second)
2313 S.Diag(Loc: Note.first, PD: Note.second);
2314 }
2315 }
2316
2317 void warnLoopStateMismatch(SourceLocation Loc,
2318 StringRef VariableName) override {
2319 PartialDiagnosticAt Warning(Loc, S.PDiag(DiagID: diag::warn_loop_state_mismatch) <<
2320 VariableName);
2321
2322 Warnings.emplace_back(args: std::move(Warning), args: OptionalNotes());
2323 }
2324
2325 void warnParamReturnTypestateMismatch(SourceLocation Loc,
2326 StringRef VariableName,
2327 StringRef ExpectedState,
2328 StringRef ObservedState) override {
2329
2330 PartialDiagnosticAt Warning(Loc, S.PDiag(
2331 DiagID: diag::warn_param_return_typestate_mismatch) << VariableName <<
2332 ExpectedState << ObservedState);
2333
2334 Warnings.emplace_back(args: std::move(Warning), args: OptionalNotes());
2335 }
2336
2337 void warnParamTypestateMismatch(SourceLocation Loc, StringRef ExpectedState,
2338 StringRef ObservedState) override {
2339
2340 PartialDiagnosticAt Warning(Loc, S.PDiag(
2341 DiagID: diag::warn_param_typestate_mismatch) << ExpectedState << ObservedState);
2342
2343 Warnings.emplace_back(args: std::move(Warning), args: OptionalNotes());
2344 }
2345
2346 void warnReturnTypestateForUnconsumableType(SourceLocation Loc,
2347 StringRef TypeName) override {
2348 PartialDiagnosticAt Warning(Loc, S.PDiag(
2349 DiagID: diag::warn_return_typestate_for_unconsumable_type) << TypeName);
2350
2351 Warnings.emplace_back(args: std::move(Warning), args: OptionalNotes());
2352 }
2353
2354 void warnReturnTypestateMismatch(SourceLocation Loc, StringRef ExpectedState,
2355 StringRef ObservedState) override {
2356
2357 PartialDiagnosticAt Warning(Loc, S.PDiag(
2358 DiagID: diag::warn_return_typestate_mismatch) << ExpectedState << ObservedState);
2359
2360 Warnings.emplace_back(args: std::move(Warning), args: OptionalNotes());
2361 }
2362
2363 void warnUseOfTempInInvalidState(StringRef MethodName, StringRef State,
2364 SourceLocation Loc) override {
2365
2366 PartialDiagnosticAt Warning(Loc, S.PDiag(
2367 DiagID: diag::warn_use_of_temp_in_invalid_state) << MethodName << State);
2368
2369 Warnings.emplace_back(args: std::move(Warning), args: OptionalNotes());
2370 }
2371
2372 void warnUseInInvalidState(StringRef MethodName, StringRef VariableName,
2373 StringRef State, SourceLocation Loc) override {
2374
2375 PartialDiagnosticAt Warning(Loc, S.PDiag(DiagID: diag::warn_use_in_invalid_state) <<
2376 MethodName << VariableName << State);
2377
2378 Warnings.emplace_back(args: std::move(Warning), args: OptionalNotes());
2379 }
2380};
2381} // anonymous namespace
2382} // namespace consumed
2383} // namespace clang
2384
2385//===----------------------------------------------------------------------===//
2386// Unsafe buffer usage analysis.
2387//===----------------------------------------------------------------------===//
2388
2389namespace {
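// Translates findings from the unsafe-buffer-usage analysis into Sema
// diagnostics, including the fix-it notes that rewrite raw-buffer variables.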
2390class UnsafeBufferUsageReporter : public UnsafeBufferUsageHandler {
2391 Sema &S;
2392 bool SuggestSuggestions; // Recommend -fsafe-buffer-usage-suggestions?
2393
2394  // Returns a comma-separated string listing the names of the variables in
2395  // `VarGroupForVD`, excluding `VD` itself:
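  // For example, for a group {a, b, c, d} with `VD` == a, this returns
  // "'b', 'c', and 'd'"; for {a, b} it returns "'b'".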
2396 std::string listVariableGroupAsString(
2397 const VarDecl *VD, const ArrayRef<const VarDecl *> &VarGroupForVD) const {
2398 if (VarGroupForVD.size() <= 1)
2399 return "";
2400
2401 std::vector<StringRef> VarNames;
2402 auto PutInQuotes = [](StringRef S) -> std::string {
2403 return "'" + S.str() + "'";
2404 };
2405
2406 for (auto *V : VarGroupForVD) {
2407 if (V == VD)
2408 continue;
2409 VarNames.push_back(x: V->getName());
2410 }
2411 if (VarNames.size() == 1) {
2412 return PutInQuotes(VarNames[0]);
2413 }
2414 if (VarNames.size() == 2) {
2415 return PutInQuotes(VarNames[0]) + " and " + PutInQuotes(VarNames[1]);
2416 }
2417 assert(VarGroupForVD.size() > 3);
2418 const unsigned N = VarNames.size() -
2419 2; // need to print the last two names as "..., X, and Y"
2420 std::string AllVars = "";
2421
2422 for (unsigned I = 0; I < N; ++I)
2423 AllVars.append(str: PutInQuotes(VarNames[I]) + ", ");
2424 AllVars.append(str: PutInQuotes(VarNames[N]) + ", and " +
2425 PutInQuotes(VarNames[N + 1]));
2426 return AllVars;
2427 }
2428
2429public:
2430 UnsafeBufferUsageReporter(Sema &S, bool SuggestSuggestions)
2431 : S(S), SuggestSuggestions(SuggestSuggestions) {}
2432
2433 void handleUnsafeOperation(const Stmt *Operation, bool IsRelatedToDecl,
2434 ASTContext &Ctx) override {
2435 SourceLocation Loc;
2436 SourceRange Range;
2437 unsigned MsgParam = 0;
2438 NamedDecl *D = nullptr;
2439 if (const auto *ASE = dyn_cast<ArraySubscriptExpr>(Val: Operation)) {
2440 Loc = ASE->getBase()->getExprLoc();
2441 Range = ASE->getBase()->getSourceRange();
2442 MsgParam = 2;
2443 } else if (const auto *BO = dyn_cast<BinaryOperator>(Val: Operation)) {
2444 BinaryOperator::Opcode Op = BO->getOpcode();
2445 if (Op == BO_Add || Op == BO_AddAssign || Op == BO_Sub ||
2446 Op == BO_SubAssign) {
2447 if (BO->getRHS()->getType()->isIntegerType()) {
2448 Loc = BO->getLHS()->getExprLoc();
2449 Range = BO->getLHS()->getSourceRange();
2450 } else {
2451 Loc = BO->getRHS()->getExprLoc();
2452 Range = BO->getRHS()->getSourceRange();
2453 }
2454 MsgParam = 1;
2455 }
2456 } else if (const auto *UO = dyn_cast<UnaryOperator>(Val: Operation)) {
2457 UnaryOperator::Opcode Op = UO->getOpcode();
2458 if (Op == UO_PreInc || Op == UO_PreDec || Op == UO_PostInc ||
2459 Op == UO_PostDec) {
2460 Loc = UO->getSubExpr()->getExprLoc();
2461 Range = UO->getSubExpr()->getSourceRange();
2462 MsgParam = 1;
2463 }
2464 } else {
2465 if (isa<CallExpr>(Val: Operation) || isa<CXXConstructExpr>(Val: Operation)) {
2466 // note_unsafe_buffer_operation doesn't have this mode yet.
2467 assert(!IsRelatedToDecl && "Not implemented yet!");
2468 MsgParam = 3;
2469 } else if (isa<MemberExpr>(Val: Operation)) {
2470 // note_unsafe_buffer_operation doesn't have this mode yet.
2471 assert(!IsRelatedToDecl && "Not implemented yet!");
2472 auto *ME = cast<MemberExpr>(Val: Operation);
2473 D = ME->getMemberDecl();
2474 MsgParam = 5;
2475 } else if (const auto *ECE = dyn_cast<ExplicitCastExpr>(Val: Operation)) {
2476 QualType destType = ECE->getType();
2477 bool destTypeComplete = true;
2478
2479 if (!isa<PointerType>(Val: destType))
2480 return;
2481 destType = destType.getTypePtr()->getPointeeType();
2482 if (const auto *D = destType->getAsTagDecl())
2483 destTypeComplete = D->isCompleteDefinition();
2484
2485      // If the destination type is incomplete, casting to it is unsafe
2486      // regardless, so there is no need to compare pointee sizes:
2487 if (destTypeComplete) {
2488 const uint64_t dSize = Ctx.getTypeSize(T: destType);
2489 QualType srcType = ECE->getSubExpr()->getType();
2490
2491 assert(srcType->isPointerType());
2492
2493 const uint64_t sSize =
2494 Ctx.getTypeSize(T: srcType.getTypePtr()->getPointeeType());
2495
2496 if (sSize >= dSize)
2497 return;
2498 }
2499 if (const auto *CE = dyn_cast<CXXMemberCallExpr>(
2500 Val: ECE->getSubExpr()->IgnoreParens())) {
2501 D = CE->getMethodDecl();
2502 }
2503
2504 if (!D)
2505 return;
2506
2507 MsgParam = 4;
2508 }
2509 Loc = Operation->getBeginLoc();
2510 Range = Operation->getSourceRange();
2511 }
2512 if (IsRelatedToDecl) {
2513 assert(!SuggestSuggestions &&
2514 "Variables blamed for unsafe buffer usage without suggestions!");
2515 S.Diag(Loc, DiagID: diag::note_unsafe_buffer_operation) << MsgParam << Range;
2516 } else {
2517 if (D) {
2518 S.Diag(Loc, DiagID: diag::warn_unsafe_buffer_operation)
2519 << MsgParam << D << Range;
2520 } else {
2521 S.Diag(Loc, DiagID: diag::warn_unsafe_buffer_operation) << MsgParam << Range;
2522 }
2523 if (SuggestSuggestions) {
2524 S.Diag(Loc, DiagID: diag::note_safe_buffer_usage_suggestions_disabled);
2525 }
2526 }
2527 }
2528
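  // Diagnoses a call to an unsafe libc function. Bit 0x8 of `PrintfInfo`
  // indicates that the callee carries a format attribute (see the PrintfInfo
  // documentation in UnsafeBufferUsageHandler); any remaining non-zero value
  // selects the printf-specific note attached to the unsafe argument.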
2529 void handleUnsafeLibcCall(const CallExpr *Call, unsigned PrintfInfo,
2530 ASTContext &Ctx,
2531 const Expr *UnsafeArg = nullptr) override {
2532 unsigned DiagID = diag::warn_unsafe_buffer_libc_call;
2533 if (PrintfInfo & 0x8) {
2534 // The callee is a function with the format attribute. See the
2535 // documentation of PrintfInfo in UnsafeBufferUsageHandler, and
2536 // UnsafeLibcFunctionCallGadget::UnsafeKind.
2537 DiagID = diag::warn_unsafe_buffer_format_attr_call;
2538 PrintfInfo ^= 0x8;
2539 }
2540 S.Diag(Loc: Call->getBeginLoc(), DiagID)
2541 << Call->getDirectCallee() // We've checked there is a direct callee
2542 << Call->getSourceRange();
2543 if (PrintfInfo > 0) {
2544 SourceRange R =
2545 UnsafeArg ? UnsafeArg->getSourceRange() : Call->getSourceRange();
2546 S.Diag(Loc: R.getBegin(), DiagID: diag::note_unsafe_buffer_printf_call)
2547 << PrintfInfo << R;
2548 }
2549 }
2550
2551 void handleUnsafeOperationInContainer(const Stmt *Operation,
2552 bool IsRelatedToDecl,
2553 ASTContext &Ctx) override {
2554 SourceLocation Loc;
2555 SourceRange Range;
2556 unsigned MsgParam = 0;
2557
2558 // This function only handles SpanTwoParamConstructorGadget so far, which
2559 // always gives a CXXConstructExpr.
2560 const auto *CtorExpr = cast<CXXConstructExpr>(Val: Operation);
2561 Loc = CtorExpr->getLocation();
2562
2563 S.Diag(Loc, DiagID: diag::warn_unsafe_buffer_usage_in_container);
2564 if (IsRelatedToDecl) {
2565 assert(!SuggestSuggestions &&
2566 "Variables blamed for unsafe buffer usage without suggestions!");
2567 S.Diag(Loc, DiagID: diag::note_unsafe_buffer_operation) << MsgParam << Range;
2568 }
2569 }
2570
2571 void handleUnsafeVariableGroup(const VarDecl *Variable,
2572 const VariableGroupsManager &VarGrpMgr,
2573 FixItList &&Fixes, const Decl *D,
2574 const FixitStrategy &VarTargetTypes) override {
2575 assert(!SuggestSuggestions &&
2576 "Unsafe buffer usage fixits displayed without suggestions!");
2577 S.Diag(Loc: Variable->getLocation(), DiagID: diag::warn_unsafe_buffer_variable)
2578 << Variable << (Variable->getType()->isPointerType() ? 0 : 1)
2579 << Variable->getSourceRange();
2580 if (!Fixes.empty()) {
2581 assert(isa<NamedDecl>(D) &&
2582 "Fix-its are generated only for `NamedDecl`s");
2583 const NamedDecl *ND = cast<NamedDecl>(Val: D);
2584 bool BriefMsg = false;
2585      // If the variable group involves parameters, the diagnostic message will
2586      // NOT explain how the variables are grouped, as the reason is non-trivial
2587      // and irrelevant to the user's experience:
2588 const auto VarGroupForVD = VarGrpMgr.getGroupOfVar(Var: Variable, HasParm: &BriefMsg);
2589 unsigned FixItStrategy = 0;
2590 switch (VarTargetTypes.lookup(VD: Variable)) {
2591 case clang::FixitStrategy::Kind::Span:
2592 FixItStrategy = 0;
2593 break;
2594 case clang::FixitStrategy::Kind::Array:
2595 FixItStrategy = 1;
2596 break;
2597 default:
2598 assert(false && "We support only std::span and std::array");
2599      }
2600
2601 const auto &FD =
2602 S.Diag(Loc: Variable->getLocation(),
2603 DiagID: BriefMsg ? diag::note_unsafe_buffer_variable_fixit_together
2604 : diag::note_unsafe_buffer_variable_fixit_group);
2605
2606 FD << Variable << FixItStrategy;
2607 FD << listVariableGroupAsString(VD: Variable, VarGroupForVD)
2608 << (VarGroupForVD.size() > 1) << ND;
2609 for (const auto &F : Fixes) {
2610 FD << F;
2611 }
2612 }
2613
2614#ifndef NDEBUG
2615 if (areDebugNotesRequested())
2616 for (const DebugNote &Note: DebugNotesByVar[Variable])
2617 S.Diag(Note.first, diag::note_safe_buffer_debug_mode) << Note.second;
2618#endif
2619 }
2620
2621 void handleUnsafeUniquePtrArrayAccess(const DynTypedNode &Node,
2622 bool IsRelatedToDecl,
2623 ASTContext &Ctx) override {
2624 SourceLocation Loc;
2625
2626 Loc = Node.get<Stmt>()->getBeginLoc();
2627 S.Diag(Loc, DiagID: diag::warn_unsafe_buffer_usage_unique_ptr_array_access)
2628 << Node.getSourceRange();
2629 }
2630
2631 bool isSafeBufferOptOut(const SourceLocation &Loc) const override {
2632 return S.PP.isSafeBufferOptOut(SourceMgr: S.getSourceManager(), Loc);
2633 }
2634
2635 bool ignoreUnsafeBufferInContainer(const SourceLocation &Loc) const override {
2636 return S.Diags.isIgnored(DiagID: diag::warn_unsafe_buffer_usage_in_container, Loc);
2637 }
2638
2639 bool ignoreUnsafeBufferInLibcCall(const SourceLocation &Loc) const override {
2640 return S.Diags.isIgnored(DiagID: diag::warn_unsafe_buffer_libc_call, Loc);
2641 }
2642
2643 bool ignoreUnsafeBufferInStaticSizedArray(
2644 const SourceLocation &Loc) const override {
2645 return S.Diags.isIgnored(
2646 DiagID: diag::warn_unsafe_buffer_usage_in_static_sized_array, Loc);
2647 }
2648
2649  // Returns the text representation of the clang::unsafe_buffer_usage
2650  // attribute. `WSSuffix` holds a custom whitespace suffix, e.g., a newline or
2651  // space characters.
2652 std::string
2653 getUnsafeBufferUsageAttributeTextAt(SourceLocation Loc,
2654 StringRef WSSuffix = "") const override {
2655 Preprocessor &PP = S.getPreprocessor();
2656 TokenValue ClangUnsafeBufferUsageTokens[] = {
2657 tok::l_square,
2658 tok::l_square,
2659 PP.getIdentifierInfo(Name: "clang"),
2660 tok::coloncolon,
2661 PP.getIdentifierInfo(Name: "unsafe_buffer_usage"),
2662 tok::r_square,
2663 tok::r_square};
2664
2665 StringRef MacroName;
2666
2667    // The returned macro, if any, is guaranteed not to be function-like:
2668 MacroName = PP.getLastMacroWithSpelling(Loc, Tokens: ClangUnsafeBufferUsageTokens);
2669 if (MacroName.empty())
2670 MacroName = "[[clang::unsafe_buffer_usage]]";
2671 return MacroName.str() + WSSuffix.str();
2672 }
2673};
2674} // namespace
2675
2676//===----------------------------------------------------------------------===//
2677// AnalysisBasedWarnings - Worker object used by Sema to execute analysis-based
2678// warnings on a function, method, or block.
2679//===----------------------------------------------------------------------===//
2680
2681sema::AnalysisBasedWarnings::Policy::Policy() {
2682 enableCheckFallThrough = 1;
2683 enableCheckUnreachable = 0;
2684 enableThreadSafetyAnalysis = 0;
2685 enableConsumedAnalysis = 0;
2686}
2687
2688/// InterProceduralData serves as storage for whatever data should be passed
2689/// between analyses of different functions.
2690///
2691/// At the moment, its primary goal is to make the information gathered during
2692/// the analysis of blocks available during the analysis of the enclosing
2693/// function. This matters because blocks are analyzed before the enclosing
2694/// function has even been fully parsed, so it is not viable to access
2695/// anything in the outer scope while analyzing the block. On the other hand,
2696/// re-building the CFG for blocks and re-analyzing them once all the
2697/// information is available (i.e. during the analysis of the enclosing
2698/// function) would be a poor design.
2699class sema::AnalysisBasedWarnings::InterProceduralData {
2700public:
2701  // It is important to analyze blocks within functions because it is a very
2702  // common pattern for blocks to capture completion-handler parameters.
2703 CalledOnceInterProceduralData CalledOnceData;
2704};
2705
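// Returns true if any of the given diagnostic IDs is enabled (not ignored) at
// the given location.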
2706template <typename... Ts>
2707static bool areAnyEnabled(DiagnosticsEngine &D, SourceLocation Loc,
2708 Ts... Diags) {
2709 return (!D.isIgnored(DiagID: Diags, Loc) || ...);
2710}
2711
2712sema::AnalysisBasedWarnings::AnalysisBasedWarnings(Sema &s)
2713 : S(s), IPData(std::make_unique<InterProceduralData>()),
2714 NumFunctionsAnalyzed(0), NumFunctionsWithBadCFGs(0), NumCFGBlocks(0),
2715 MaxCFGBlocksPerFunction(0), NumUninitAnalysisFunctions(0),
2716 NumUninitAnalysisVariables(0), MaxUninitAnalysisVariablesPerFunction(0),
2717 NumUninitAnalysisBlockVisits(0),
2718 MaxUninitAnalysisBlockVisitsPerFunction(0) {
2719}
2720
2721// We need this here for unique_ptr with forward declared class.
2722sema::AnalysisBasedWarnings::~AnalysisBasedWarnings() = default;
2723
2724sema::AnalysisBasedWarnings::Policy
2725sema::AnalysisBasedWarnings::getPolicyInEffectAt(SourceLocation Loc) {
2726 using namespace diag;
2727 DiagnosticsEngine &D = S.getDiagnostics();
2728 Policy P;
2729
2730 // Note: The enabled checks should be kept in sync with the switch in
2731 // SemaPPCallbacks::PragmaDiagnostic().
2732 P.enableCheckUnreachable =
2733 PolicyOverrides.enableCheckUnreachable ||
2734 areAnyEnabled(D, Loc, Diags: warn_unreachable, Diags: warn_unreachable_break,
2735 Diags: warn_unreachable_return, Diags: warn_unreachable_loop_increment);
2736
2737 P.enableThreadSafetyAnalysis = PolicyOverrides.enableThreadSafetyAnalysis ||
2738 areAnyEnabled(D, Loc, Diags: warn_double_lock);
2739
2740 P.enableConsumedAnalysis = PolicyOverrides.enableConsumedAnalysis ||
2741 areAnyEnabled(D, Loc, Diags: warn_use_in_invalid_state);
2742 return P;
2743}
2744
2745void sema::AnalysisBasedWarnings::clearOverrides() {
2746 PolicyOverrides.enableCheckUnreachable = false;
2747 PolicyOverrides.enableConsumedAnalysis = false;
2748 PolicyOverrides.enableThreadSafetyAnalysis = false;
2749}
2750
2751static void flushDiagnostics(Sema &S, const sema::FunctionScopeInfo *fscope) {
2752 for (const auto &D : fscope->PossiblyUnreachableDiags)
2753 S.Diag(Loc: D.Loc, PD: D.PD);
2754}
2755
2756template <typename Iterator>
2757static void emitPossiblyUnreachableDiags(Sema &S, AnalysisDeclContext &AC,
2758 std::pair<Iterator, Iterator> PUDs) {
2759
2760 if (PUDs.first == PUDs.second)
2761 return;
2762
2763 for (auto I = PUDs.first; I != PUDs.second; ++I) {
2764 for (const Stmt *S : I->Stmts)
2765 AC.registerForcedBlockExpression(stmt: S);
2766 }
2767
2768 if (AC.getCFG()) {
2769 CFGReverseBlockReachabilityAnalysis *Analysis =
2770 AC.getCFGReachablityAnalysis();
2771
2772 for (auto I = PUDs.first; I != PUDs.second; ++I) {
2773 const auto &D = *I;
2774 if (llvm::all_of(D.Stmts, [&](const Stmt *St) {
2775 const CFGBlock *Block = AC.getBlockForRegisteredExpression(stmt: St);
2776 // FIXME: We should be able to assert that block is non-null, but
2777 // the CFG analysis can skip potentially-evaluated expressions in
2778 // edge cases; see test/Sema/vla-2.c.
2779 if (Block && Analysis)
2780 if (!Analysis->isReachable(Src: &AC.getCFG()->getEntry(), Dst: Block))
2781 return false;
2782 return true;
2783 })) {
2784 S.Diag(D.Loc, D.PD);
2785 }
2786 }
2787 } else {
2788 for (auto I = PUDs.first; I != PUDs.second; ++I)
2789 S.Diag(I->Loc, I->PD);
2790 }
2791}
2792
2793void sema::AnalysisBasedWarnings::registerVarDeclWarning(
2794 VarDecl *VD, clang::sema::PossiblyUnreachableDiag PUD) {
2795 VarDeclPossiblyUnreachableDiags.emplace(args&: VD, args&: PUD);
2796}
2797
2798void sema::AnalysisBasedWarnings::issueWarningsForRegisteredVarDecl(
2799 VarDecl *VD) {
2800 if (!llvm::is_contained(Range&: VarDeclPossiblyUnreachableDiags, Element: VD))
2801 return;
2802
2803 AnalysisDeclContext AC(/*Mgr=*/nullptr, VD);
2804
2805 AC.getCFGBuildOptions().PruneTriviallyFalseEdges = true;
2806 AC.getCFGBuildOptions().AddEHEdges = false;
2807 AC.getCFGBuildOptions().AddInitializers = true;
2808 AC.getCFGBuildOptions().AddImplicitDtors = true;
2809 AC.getCFGBuildOptions().AddTemporaryDtors = true;
2810 AC.getCFGBuildOptions().AddCXXNewAllocator = false;
2811 AC.getCFGBuildOptions().AddCXXDefaultInitExprInCtors = true;
2812
2813 auto Range = VarDeclPossiblyUnreachableDiags.equal_range(x: VD);
2814 auto SecondRange =
2815 llvm::make_second_range(c: llvm::make_range(x: Range.first, y: Range.second));
2816 emitPossiblyUnreachableDiags(
2817 S, AC, PUDs: std::make_pair(x: SecondRange.begin(), y: SecondRange.end()));
2818}
2819
2820// An AST Visitor that calls a callback function on each callable DEFINITION
2821// that is NOT in a dependent context:
2822class CallableVisitor : public DynamicRecursiveASTVisitor {
2823private:
2824 llvm::function_ref<void(const Decl *)> Callback;
2825 const Module *const TUModule;
2826
2827public:
2828 CallableVisitor(llvm::function_ref<void(const Decl *)> Callback,
2829 const Module *const TUModule)
2830 : Callback(Callback), TUModule(TUModule) {
2831 ShouldVisitTemplateInstantiations = true;
2832 ShouldVisitImplicitCode = false;
2833 }
2834
2835 bool TraverseDecl(Decl *Node) override {
2836 // For performance reasons, only validate the current translation unit's
2837 // module, and not modules it depends on.
2838 // See https://issues.chromium.org/issues/351909443 for details.
2839 if (Node && Node->getOwningModule() == TUModule)
2840 return DynamicRecursiveASTVisitor::TraverseDecl(D: Node);
2841 return true;
2842 }
2843
2844 bool VisitFunctionDecl(FunctionDecl *Node) override {
2845 if (cast<DeclContext>(Val: Node)->isDependentContext())
2846      return true; // Do not analyze dependent declarations.
2847    // `FunctionDecl::hasBody()` returns true if the function has a body
2848    // defined somewhere. But we want to know whether this particular `Node`
2849    // carries the body, so we use `doesThisDeclarationHaveABody`:
2850 if (Node->doesThisDeclarationHaveABody())
2851 Callback(Node);
2852 return true;
2853 }
2854
2855 bool VisitBlockDecl(BlockDecl *Node) override {
2856 if (cast<DeclContext>(Val: Node)->isDependentContext())
2857      return true; // Do not analyze dependent declarations.
2858 Callback(Node);
2859 return true;
2860 }
2861
2862 bool VisitObjCMethodDecl(ObjCMethodDecl *Node) override {
2863 if (cast<DeclContext>(Val: Node)->isDependentContext())
2864      return true; // Do not analyze dependent declarations.
2865 if (Node->hasBody())
2866 Callback(Node);
2867 return true;
2868 }
2869
2870 bool VisitLambdaExpr(LambdaExpr *Node) override {
2871 return VisitFunctionDecl(Node: Node->getCallOperator());
2872 }
2873};
2874
2875namespace clang::lifetimes {
2876namespace {
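// Bridges the lifetime-safety analysis to Sema: each callback turns an
// analysis finding into the corresponding warning plus notes, and the
// annotation-suggestion callbacks additionally attach [[clang::lifetimebound]]
// fix-it hints.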
2877class LifetimeSafetySemaHelperImpl : public LifetimeSafetySemaHelper {
2878
2879public:
2880 LifetimeSafetySemaHelperImpl(Sema &S) : S(S) {}
2881
2882 void reportUseAfterFree(const Expr *IssueExpr, const Expr *UseExpr,
2883 SourceLocation FreeLoc, Confidence C) override {
2884 S.Diag(Loc: IssueExpr->getExprLoc(),
2885 DiagID: C == Confidence::Definite
2886 ? diag::warn_lifetime_safety_loan_expires_permissive
2887 : diag::warn_lifetime_safety_loan_expires_strict)
2888 << IssueExpr->getSourceRange();
2889 S.Diag(Loc: FreeLoc, DiagID: diag::note_lifetime_safety_destroyed_here);
2890 S.Diag(Loc: UseExpr->getExprLoc(), DiagID: diag::note_lifetime_safety_used_here)
2891 << UseExpr->getSourceRange();
2892 }
2893
2894 void reportUseAfterReturn(const Expr *IssueExpr, const Expr *ReturnExpr,
2895 SourceLocation ExpiryLoc, Confidence C) override {
2896 S.Diag(Loc: IssueExpr->getExprLoc(),
2897 DiagID: C == Confidence::Definite
2898 ? diag::warn_lifetime_safety_return_stack_addr_permissive
2899 : diag::warn_lifetime_safety_return_stack_addr_strict)
2900 << IssueExpr->getSourceRange();
2901 S.Diag(Loc: ReturnExpr->getExprLoc(), DiagID: diag::note_lifetime_safety_returned_here)
2902 << ReturnExpr->getSourceRange();
2903 }
2904 void reportDanglingField(const Expr *IssueExpr,
2905 const FieldDecl *DanglingField,
2906 SourceLocation ExpiryLoc) override {
2907 S.Diag(Loc: IssueExpr->getExprLoc(), DiagID: diag::warn_lifetime_safety_dangling_field)
2908 << IssueExpr->getSourceRange();
2909 S.Diag(Loc: DanglingField->getLocation(),
2910 DiagID: diag::note_lifetime_safety_dangling_field_here)
2911 << DanglingField->getEndLoc();
2912 }
2913
2914 void suggestLifetimeboundToParmVar(SuggestionScope Scope,
2915 const ParmVarDecl *ParmToAnnotate,
2916 const Expr *EscapeExpr) override {
2917 unsigned DiagID =
2918 (Scope == SuggestionScope::CrossTU)
2919 ? diag::warn_lifetime_safety_cross_tu_param_suggestion
2920 : diag::warn_lifetime_safety_intra_tu_param_suggestion;
2921 SourceLocation InsertionPoint = Lexer::getLocForEndOfToken(
2922 Loc: ParmToAnnotate->getEndLoc(), Offset: 0, SM: S.getSourceManager(), LangOpts: S.getLangOpts());
2923 S.Diag(Loc: ParmToAnnotate->getBeginLoc(), DiagID)
2924 << ParmToAnnotate->getSourceRange()
2925 << FixItHint::CreateInsertion(InsertionLoc: InsertionPoint,
2926 Code: " [[clang::lifetimebound]]");
2927 S.Diag(Loc: EscapeExpr->getBeginLoc(),
2928 DiagID: diag::note_lifetime_safety_suggestion_returned_here)
2929 << EscapeExpr->getSourceRange();
2930 }
2931
2932 void suggestLifetimeboundToImplicitThis(SuggestionScope Scope,
2933 const CXXMethodDecl *MD,
2934 const Expr *EscapeExpr) override {
2935 unsigned DiagID = (Scope == SuggestionScope::CrossTU)
2936 ? diag::warn_lifetime_safety_cross_tu_this_suggestion
2937 : diag::warn_lifetime_safety_intra_tu_this_suggestion;
2938 SourceLocation InsertionPoint;
2939 InsertionPoint = Lexer::getLocForEndOfToken(
2940 Loc: MD->getTypeSourceInfo()->getTypeLoc().getEndLoc(), Offset: 0,
2941 SM: S.getSourceManager(), LangOpts: S.getLangOpts());
2942 S.Diag(Loc: InsertionPoint, DiagID)
2943 << MD->getNameInfo().getSourceRange()
2944 << FixItHint::CreateInsertion(InsertionLoc: InsertionPoint,
2945 Code: " [[clang::lifetimebound]]");
2946 S.Diag(Loc: EscapeExpr->getBeginLoc(),
2947 DiagID: diag::note_lifetime_safety_suggestion_returned_here)
2948 << EscapeExpr->getSourceRange();
2949 }
2950
2951 void reportNoescapeViolation(const ParmVarDecl *ParmWithNoescape,
2952 const Expr *EscapeExpr) override {
2953 S.Diag(Loc: ParmWithNoescape->getBeginLoc(),
2954 DiagID: diag::warn_lifetime_safety_noescape_escapes)
2955 << ParmWithNoescape->getSourceRange();
2956
2957 S.Diag(Loc: EscapeExpr->getBeginLoc(),
2958 DiagID: diag::note_lifetime_safety_suggestion_returned_here)
2959 << EscapeExpr->getSourceRange();
2960 }
2961
2962 void reportNoescapeViolation(const ParmVarDecl *ParmWithNoescape,
2963 const FieldDecl *EscapeField) override {
2964 S.Diag(Loc: ParmWithNoescape->getBeginLoc(),
2965 DiagID: diag::warn_lifetime_safety_noescape_escapes)
2966 << ParmWithNoescape->getSourceRange();
2967
2968 S.Diag(Loc: EscapeField->getLocation(),
2969 DiagID: diag::note_lifetime_safety_escapes_to_field_here)
2970 << EscapeField->getEndLoc();
2971 }
2972
2973 void addLifetimeBoundToImplicitThis(const CXXMethodDecl *MD) override {
2974 S.addLifetimeBoundToImplicitThis(MD: const_cast<CXXMethodDecl *>(MD));
2975 }
2976
2977private:
2978 Sema &S;
2979};
2980} // namespace
2981} // namespace clang::lifetimes
2982
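// Runs the lifetime-safety analysis over the whole translation unit: builds a
// call graph and visits function definitions in post order, so callees are
// analyzed before their callers.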
2983static void
2984LifetimeSafetyTUAnalysis(Sema &S, TranslationUnitDecl *TU,
2985 clang::lifetimes::LifetimeSafetyStats &LSStats) {
2986 llvm::TimeTraceScope TimeProfile("LifetimeSafetyTUAnalysis");
2987 CallGraph CG;
2988 CG.addToCallGraph(D: TU);
2989 lifetimes::LifetimeSafetySemaHelperImpl SemaHelper(S);
2990 for (auto *Node : llvm::post_order(G: &CG)) {
2991 const clang::FunctionDecl *CanonicalFD =
2992 dyn_cast_or_null<clang::FunctionDecl>(Val: Node->getDecl());
2993 if (!CanonicalFD)
2994 continue;
2995 const FunctionDecl *FD = CanonicalFD->getDefinition();
2996 if (!FD)
2997 continue;
2998 AnalysisDeclContext AC(nullptr, FD);
2999 AC.getCFGBuildOptions().PruneTriviallyFalseEdges = false;
3000 AC.getCFGBuildOptions().AddLifetime = true;
3001 AC.getCFGBuildOptions().AddParameterLifetimes = true;
3002 AC.getCFGBuildOptions().AddImplicitDtors = true;
3003 AC.getCFGBuildOptions().AddTemporaryDtors = true;
3004 AC.getCFGBuildOptions().setAllAlwaysAdd();
3005 if (AC.getCFG())
3006 runLifetimeSafetyAnalysis(AC, SemaHelper: &SemaHelper, Stats&: LSStats, CollectStats: S.CollectStats);
3007 }
3008}
3009
3010void clang::sema::AnalysisBasedWarnings::IssueWarnings(
3011 TranslationUnitDecl *TU) {
3012 if (!TU)
3013 return; // This is unexpected, give up quietly.
3014
3015 DiagnosticsEngine &Diags = S.getDiagnostics();
3016
3017 if (S.hasUncompilableErrorOccurred() || Diags.getIgnoreAllWarnings())
3018    // Bail out if there are uncompilable errors or all warnings are ignored:
3019 return;
3020
3021 DiagnosticOptions &DiagOpts = Diags.getDiagnosticOptions();
3022
3023 // UnsafeBufferUsage analysis settings.
3024 bool UnsafeBufferUsageCanEmitSuggestions = S.getLangOpts().CPlusPlus20;
3025 bool UnsafeBufferUsageShouldEmitSuggestions = // Should != Can.
3026 UnsafeBufferUsageCanEmitSuggestions &&
3027 DiagOpts.ShowSafeBufferUsageSuggestions;
3028 bool UnsafeBufferUsageShouldSuggestSuggestions =
3029 UnsafeBufferUsageCanEmitSuggestions &&
3030 !DiagOpts.ShowSafeBufferUsageSuggestions;
3031 UnsafeBufferUsageReporter R(S, UnsafeBufferUsageShouldSuggestSuggestions);
3032
3033 // The Callback function that performs analyses:
3034 auto CallAnalyzers = [&](const Decl *Node) -> void {
3035 if (Node->hasAttr<UnsafeBufferUsageAttr>())
3036 return;
3037
3038 // Perform unsafe buffer usage analysis:
3039 if (!Diags.isIgnored(DiagID: diag::warn_unsafe_buffer_operation,
3040 Loc: Node->getBeginLoc()) ||
3041 !Diags.isIgnored(DiagID: diag::warn_unsafe_buffer_variable,
3042 Loc: Node->getBeginLoc()) ||
3043 !Diags.isIgnored(DiagID: diag::warn_unsafe_buffer_usage_in_container,
3044 Loc: Node->getBeginLoc()) ||
3045 !Diags.isIgnored(DiagID: diag::warn_unsafe_buffer_libc_call,
3046 Loc: Node->getBeginLoc())) {
3047 clang::checkUnsafeBufferUsage(D: Node, Handler&: R,
3048 EmitSuggestions: UnsafeBufferUsageShouldEmitSuggestions);
3049 }
3050
3051 // More analysis ...
3052 };
3053  // Emit per-function analysis-based warnings that require whole-TU
3054  // reasoning. Check whether any of them is enabled at all before scanning the AST:
3055 if (!Diags.isIgnored(DiagID: diag::warn_unsafe_buffer_operation, Loc: SourceLocation()) ||
3056 !Diags.isIgnored(DiagID: diag::warn_unsafe_buffer_variable, Loc: SourceLocation()) ||
3057 !Diags.isIgnored(DiagID: diag::warn_unsafe_buffer_usage_in_container,
3058 Loc: SourceLocation()) ||
3059 (!Diags.isIgnored(DiagID: diag::warn_unsafe_buffer_libc_call, Loc: SourceLocation()) &&
3060 S.getLangOpts().CPlusPlus /* only warn about libc calls in C++ */)) {
3061 CallableVisitor(CallAnalyzers, TU->getOwningModule())
3062 .TraverseTranslationUnitDecl(D: TU);
3063 }
3064
3065 if (S.getLangOpts().EnableLifetimeSafety && S.getLangOpts().CPlusPlus &&
3066 S.getLangOpts().EnableLifetimeSafetyTUAnalysis)
3067 LifetimeSafetyTUAnalysis(S, TU, LSStats);
3068}
3069
void clang::sema::AnalysisBasedWarnings::IssueWarnings(
    sema::AnalysisBasedWarnings::Policy P, sema::FunctionScopeInfo *fscope,
    const Decl *D, QualType BlockType) {

  // We avoid doing analysis-based warnings when there are errors for
  // two reasons:
  // (1) The CFGs often can't be constructed (if the body is invalid), so
  //     don't bother trying.
  // (2) The code already has problems; running the analysis just takes more
  //     time.
  DiagnosticsEngine &Diags = S.getDiagnostics();

  // Do not do any analysis if the warnings are just going to be ignored.
  if (Diags.getIgnoreAllWarnings() ||
      (Diags.getSuppressSystemWarnings() &&
       S.SourceMgr.isInSystemHeader(D->getLocation())))
    return;

  // For code in dependent contexts, we'll do this at instantiation time.
  if (cast<DeclContext>(D)->isDependentContext())
    return;

  if (S.hasUncompilableErrorOccurred()) {
    // Flush out any possibly unreachable diagnostics.
    flushDiagnostics(S, fscope);
    return;
  }

  const Stmt *Body = D->getBody();
  assert(Body);

  // Construct the analysis context with the specified CFG build options.
  AnalysisDeclContext AC(/* AnalysisDeclContextManager */ nullptr, D);

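  // The options below make initializers, implicit and temporary destructors,
  // and parameter lifetimes explicit elements of the CFG so the analyses run
  // later in this function can observe them directly.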
  // Don't generate EH edges for CallExprs: doing so can cause an n^2
  // explosion of destructor edges and a significant compile-time hit.
  AC.getCFGBuildOptions().PruneTriviallyFalseEdges = true;
  AC.getCFGBuildOptions().AddEHEdges = false;
  AC.getCFGBuildOptions().AddInitializers = true;
  AC.getCFGBuildOptions().AddImplicitDtors = true;
  AC.getCFGBuildOptions().AddParameterLifetimes = true;
  AC.getCFGBuildOptions().AddTemporaryDtors = true;
  AC.getCFGBuildOptions().AddCXXNewAllocator = false;
  AC.getCFGBuildOptions().AddCXXDefaultInitExprInCtors = true;

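  // The (experimental) lifetime safety analysis flags borrows that outlive
  // what they refer to, e.g. returning the address of a local variable or
  // keeping a pointer/view to an object past the end of its lifetime. Each
  // diagnostic comes in permissive and strict variants, checked separately
  // below.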
  bool IsLifetimeSafetyDiagnosticEnabled =
      !Diags.isIgnored(diag::warn_lifetime_safety_loan_expires_permissive,
                       D->getBeginLoc()) ||
      !Diags.isIgnored(diag::warn_lifetime_safety_loan_expires_strict,
                       D->getBeginLoc()) ||
      !Diags.isIgnored(diag::warn_lifetime_safety_return_stack_addr_permissive,
                       D->getBeginLoc()) ||
      !Diags.isIgnored(diag::warn_lifetime_safety_return_stack_addr_strict,
                       D->getBeginLoc()) ||
      !Diags.isIgnored(diag::warn_lifetime_safety_noescape_escapes,
                       D->getBeginLoc());
  bool EnableLifetimeSafetyAnalysis =
      S.getLangOpts().EnableLifetimeSafety &&
      !S.getLangOpts().EnableLifetimeSafetyTUAnalysis &&
      IsLifetimeSafetyDiagnosticEnabled;

  // Force that certain expressions appear as CFGElements in the CFG. This
  // is used to speed up various analyses.
  // FIXME: This isn't the right factoring. This is here for initial
  // prototyping, but we need a way for analyses to say what expressions they
  // expect to always be CFGElements and then fill in the BuildOptions
  // appropriately. This is essentially a layering violation.
  if (P.enableCheckUnreachable || P.enableThreadSafetyAnalysis ||
      P.enableConsumedAnalysis || EnableLifetimeSafetyAnalysis) {
    // Unreachable code analysis and thread safety require a linearized CFG.
    AC.getCFGBuildOptions().setAllAlwaysAdd();
  } else {
    AC.getCFGBuildOptions()
        .setAlwaysAdd(Stmt::BinaryOperatorClass)
        .setAlwaysAdd(Stmt::CompoundAssignOperatorClass)
        .setAlwaysAdd(Stmt::BlockExprClass)
        .setAlwaysAdd(Stmt::CStyleCastExprClass)
        .setAlwaysAdd(Stmt::DeclRefExprClass)
        .setAlwaysAdd(Stmt::ImplicitCastExprClass)
        .setAlwaysAdd(Stmt::UnaryOperatorClass);
  }
  if (EnableLifetimeSafetyAnalysis)
    AC.getCFGBuildOptions().AddLifetime = true;

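  // The logical error handler observes branch conditions while the CFG is
  // built and diagnoses suspicious logic, such as overlapping comparisons
  // that always evaluate to true or false (e.g. 'x > 5 && x < 3').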
  // Install the logical handler.
  std::optional<LogicalErrorHandler> LEH;
  if (LogicalErrorHandler::hasActiveDiagnostics(Diags, D->getBeginLoc())) {
    LEH.emplace(S);
    AC.getCFGBuildOptions().Observer = &*LEH;
  }

  // Emit delayed diagnostics.
  auto &PUDs = fscope->PossiblyUnreachableDiags;
  emitPossiblyUnreachableDiags(S, AC, std::make_pair(PUDs.begin(), PUDs.end()));

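  // The fall-through check reports control paths that reach the end of a
  // value-returning body without returning a value, e.g.
  //
  //   int f(bool b) { if (b) return 1; }  // warns: falls off the end
  //
  // with dedicated diagnostics for blocks, lambdas, and coroutines.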
  // Warning: check for a missing 'return'.
  if (P.enableCheckFallThrough) {
    const CheckFallThroughDiagnostics &CD =
        (isa<BlockDecl>(D) ? CheckFallThroughDiagnostics::MakeForBlock()
         : (isa<CXXMethodDecl>(D) &&
            cast<CXXMethodDecl>(D)->getOverloadedOperator() == OO_Call &&
            cast<CXXMethodDecl>(D)->getParent()->isLambda())
             ? CheckFallThroughDiagnostics::MakeForLambda()
             : (fscope->isCoroutine()
                    ? CheckFallThroughDiagnostics::MakeForCoroutine(D)
                    : CheckFallThroughDiagnostics::MakeForFunction(S, D)));
    CheckFallThroughForBody(S, D, Body, BlockType, CD, AC);
  }


  // Warning: check for unreachable code
  if (P.enableCheckUnreachable) {
    // Only check for unreachable code on non-template instantiations.
    // Different template instantiations can effectively change the
    // control-flow and it is very difficult to prove that a snippet of code
    // in a template is unreachable for all instantiations.
    bool isTemplateInstantiation = false;
    if (const FunctionDecl *Function = dyn_cast<FunctionDecl>(D))
      isTemplateInstantiation = Function->isTemplateInstantiation();
    if (!isTemplateInstantiation)
      CheckUnreachable(S, AC);
  }

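  // The thread safety analysis enforces lock/capability annotations
  // (-Wthread-safety and related groups). Given a suitably annotated Mutex
  // type:
  //
  //   Mutex Mu;
  //   int Data __attribute__((guarded_by(Mu)));
  //   void f() { Data = 1; }  // warns: writing 'Data' requires holding 'Mu'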
  // Check for thread safety violations
  if (P.enableThreadSafetyAnalysis) {
    SourceLocation FL = AC.getDecl()->getLocation();
    SourceLocation FEL = AC.getDecl()->getEndLoc();
    threadSafety::ThreadSafetyReporter Reporter(S, FL, FEL);
    if (!Diags.isIgnored(diag::warn_thread_safety_beta, D->getBeginLoc()))
      Reporter.setIssueBetaWarnings(true);
    if (!Diags.isIgnored(diag::warn_thread_safety_verbose, D->getBeginLoc()))
      Reporter.setVerbose(true);

    threadSafety::runThreadSafetyAnalysis(AC, Reporter,
                                          &S.ThreadSafetyDeclCache);
    Reporter.emitDiagnostics();
  }

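  // The consumed analysis tracks the typestate of objects whose classes carry
  // consumable/typestate attributes (e.g. clang::consumable and
  // clang::callable_when) and warns, for instance, when an object is used
  // again after it has already been consumed.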
  // Check for violations of consumed properties.
  if (P.enableConsumedAnalysis) {
    consumed::ConsumedWarningsHandler WarningHandler(S);
    consumed::ConsumedAnalyzer Analyzer(WarningHandler);
    Analyzer.run(AC);
  }

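  // The uninitialized-variables analysis drives -Wuninitialized and its
  // "sometimes"/"maybe" variants, e.g.
  //
  //   int x;
  //   if (b)
  //     x = 1;
  //   use(x);  // warns: 'x' may be used uninitialized when 'b' is false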
  if (!Diags.isIgnored(diag::warn_uninit_var, D->getBeginLoc()) ||
      !Diags.isIgnored(diag::warn_sometimes_uninit_var, D->getBeginLoc()) ||
      !Diags.isIgnored(diag::warn_maybe_uninit_var, D->getBeginLoc()) ||
      !Diags.isIgnored(diag::warn_uninit_const_reference, D->getBeginLoc()) ||
      !Diags.isIgnored(diag::warn_uninit_const_pointer, D->getBeginLoc())) {
    if (CFG *cfg = AC.getCFG()) {
      UninitValsDiagReporter reporter(S);
      UninitVariablesAnalysisStats stats;
      std::memset(&stats, 0, sizeof(UninitVariablesAnalysisStats));
      runUninitializedVariablesAnalysis(*cast<DeclContext>(D), *cfg, AC,
                                        reporter, stats);

      if (S.CollectStats && stats.NumVariablesAnalyzed > 0) {
        ++NumUninitAnalysisFunctions;
        NumUninitAnalysisVariables += stats.NumVariablesAnalyzed;
        NumUninitAnalysisBlockVisits += stats.NumBlockVisits;
        MaxUninitAnalysisVariablesPerFunction =
            std::max(MaxUninitAnalysisVariablesPerFunction,
                     stats.NumVariablesAnalyzed);
        MaxUninitAnalysisBlockVisitsPerFunction =
            std::max(MaxUninitAnalysisBlockVisitsPerFunction,
                     stats.NumBlockVisits);
      }
    }
  }

  // TODO: Enable lifetime safety analysis for other languages once it is
  // stable.
  if (EnableLifetimeSafetyAnalysis && S.getLangOpts().CPlusPlus) {
    if (AC.getCFG()) {
      lifetimes::LifetimeSafetySemaHelperImpl LifetimeSafetySemaHelper(S);
      lifetimes::runLifetimeSafetyAnalysis(AC, &LifetimeSafetySemaHelper,
                                           LSStats, S.CollectStats);
    }
  }
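
  // The called-once check verifies that Objective-C completion-handler style
  // parameters (detected by convention or marked with the called_once
  // attribute) are invoked exactly once on every path through the function,
  // i.e. neither skipped nor called twice.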
  // Check for violations of "called once" parameter properties.
  if (S.getLangOpts().ObjC && !S.getLangOpts().CPlusPlus &&
      shouldAnalyzeCalledOnceParameters(Diags, D->getBeginLoc())) {
    if (AC.getCFG()) {
      CalledOnceCheckReporter Reporter(S, IPData->CalledOnceData);
      checkCalledOnceParameters(
          AC, Reporter,
          shouldAnalyzeCalledOnceConventions(Diags, D->getBeginLoc()));
    }
  }

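  // -Wimplicit-fallthrough diagnoses switch cases that fall through into the
  // next label without a 'break' or an explicit [[fallthrough]];, e.g.
  //
  //   switch (n) {
  //   case 0:
  //     f();       // warns: unannotated fall-through into 'case 1'
  //   case 1:
  //     g();
  //     break;
  //   }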
  bool FallThroughDiagFull =
      !Diags.isIgnored(diag::warn_unannotated_fallthrough, D->getBeginLoc());
  bool FallThroughDiagPerFunction = !Diags.isIgnored(
      diag::warn_unannotated_fallthrough_per_function, D->getBeginLoc());
  if (FallThroughDiagFull || FallThroughDiagPerFunction ||
      fscope->HasFallthroughStmt) {
    DiagnoseSwitchLabelsFallthrough(S, AC, !FallThroughDiagFull);
  }

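  // Under ARC, each read of a __weak Objective-C pointer can independently
  // yield nil if the referent has been deallocated in the meantime, so
  // -Warc-repeated-use-of-weak flags repeated reads of the same weak pointer
  // and suggests copying it into a strong variable first.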
  if (S.getLangOpts().ObjCWeak &&
      !Diags.isIgnored(diag::warn_arc_repeated_use_of_weak, D->getBeginLoc()))
    diagnoseRepeatedUseOfWeak(S, fscope, D, AC.getParentMap());

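  // -Winfinite-recursion fires when every control path through a function
  // calls the function itself again, e.g.
  //
  //   int fact(int n) { return n * fact(n - 1); }  // no base case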
  // Check for infinite self-recursion in functions
  if (!Diags.isIgnored(diag::warn_infinite_recursive_function,
                       D->getBeginLoc())) {
    if (const FunctionDecl *FD = dyn_cast<FunctionDecl>(D)) {
      checkRecursiveFunction(S, FD, Body, AC);
    }
  }

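  // warn_throw_in_noexcept_func fires when a throw can escape a function
  // declared noexcept (or with an equivalent non-throwing exception
  // specification), which would terminate the program at run time, e.g.
  //
  //   void f() noexcept { throw 1; }  // warns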
  // Check for throw out of non-throwing function.
  if (!Diags.isIgnored(diag::warn_throw_in_noexcept_func, D->getBeginLoc()))
    if (const FunctionDecl *FD = dyn_cast<FunctionDecl>(D))
      if (S.getLangOpts().CPlusPlus && !fscope->isCoroutine() && isNoexcept(FD))
        checkThrowInNonThrowingFunc(S, FD, AC);

  // If none of the previous checks caused a CFG build, trigger one here
  // for the logical error handler.
  if (LogicalErrorHandler::hasActiveDiagnostics(Diags, D->getBeginLoc())) {
    AC.getCFG();
  }

  // Clear any of our policy overrides.
  clearOverrides();

  // Collect statistics about the CFG if it was built.
  if (S.CollectStats && AC.isCFGBuilt()) {
    ++NumFunctionsAnalyzed;
    if (CFG *cfg = AC.getCFG()) {
      // If we successfully built a CFG for this context, record some more
      // detailed information about it.
      NumCFGBlocks += cfg->getNumBlockIDs();
      MaxCFGBlocksPerFunction =
          std::max(MaxCFGBlocksPerFunction, cfg->getNumBlockIDs());
    } else {
      ++NumFunctionsWithBadCFGs;
    }
  }
}

void clang::sema::AnalysisBasedWarnings::PrintStats() const {
  llvm::errs() << "\n*** Analysis Based Warnings Stats:\n";

  unsigned NumCFGsBuilt = NumFunctionsAnalyzed - NumFunctionsWithBadCFGs;
  unsigned AvgCFGBlocksPerFunction =
      !NumCFGsBuilt ? 0 : NumCFGBlocks / NumCFGsBuilt;
  llvm::errs() << NumFunctionsAnalyzed << " functions analyzed ("
               << NumFunctionsWithBadCFGs << " w/o CFGs).\n"
               << " " << NumCFGBlocks << " CFG blocks built.\n"
               << " " << AvgCFGBlocksPerFunction
               << " average CFG blocks per function.\n"
               << " " << MaxCFGBlocksPerFunction
               << " max CFG blocks per function.\n";

  unsigned AvgUninitVariablesPerFunction =
      !NumUninitAnalysisFunctions
          ? 0
          : NumUninitAnalysisVariables / NumUninitAnalysisFunctions;
  unsigned AvgUninitBlockVisitsPerFunction =
      !NumUninitAnalysisFunctions
          ? 0
          : NumUninitAnalysisBlockVisits / NumUninitAnalysisFunctions;
  llvm::errs() << NumUninitAnalysisFunctions
               << " functions analyzed for uninitialized variables\n"
               << " " << NumUninitAnalysisVariables << " variables analyzed.\n"
               << " " << AvgUninitVariablesPerFunction
               << " average variables per function.\n"
               << " " << MaxUninitAnalysisVariablesPerFunction
               << " max variables per function.\n"
               << " " << NumUninitAnalysisBlockVisits << " block visits.\n"
               << " " << AvgUninitBlockVisitsPerFunction
               << " average block visits per function.\n"
               << " " << MaxUninitAnalysisBlockVisitsPerFunction
               << " max block visits per function.\n";
  clang::lifetimes::printStats(LSStats);
}
