1//=== AnalysisBasedWarnings.cpp - Sema warnings based on libAnalysis ------===//
2//
3// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
4// See https://llvm.org/LICENSE.txt for license information.
5// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
6//
7//===----------------------------------------------------------------------===//
8//
9// This file defines analysis_warnings::[Policy,Executor].
10// Together they are used by Sema to issue warnings based on inexpensive
11// static analysis algorithms in libAnalysis.
12//
13//===----------------------------------------------------------------------===//
14
15#include "clang/Sema/AnalysisBasedWarnings.h"
16#include "TypeLocBuilder.h"
17#include "clang/AST/Decl.h"
18#include "clang/AST/DeclCXX.h"
19#include "clang/AST/DeclObjC.h"
20#include "clang/AST/DynamicRecursiveASTVisitor.h"
21#include "clang/AST/EvaluatedExprVisitor.h"
22#include "clang/AST/Expr.h"
23#include "clang/AST/ExprCXX.h"
24#include "clang/AST/ExprObjC.h"
25#include "clang/AST/OperationKinds.h"
26#include "clang/AST/ParentMap.h"
27#include "clang/AST/StmtCXX.h"
28#include "clang/AST/StmtObjC.h"
29#include "clang/AST/StmtVisitor.h"
30#include "clang/AST/Type.h"
31#include "clang/Analysis/Analyses/CFGReachabilityAnalysis.h"
32#include "clang/Analysis/Analyses/CalledOnceCheck.h"
33#include "clang/Analysis/Analyses/Consumed.h"
34#include "clang/Analysis/Analyses/LifetimeSafety/LifetimeAnnotations.h"
35#include "clang/Analysis/Analyses/LifetimeSafety/LifetimeSafety.h"
36#include "clang/Analysis/Analyses/ReachableCode.h"
37#include "clang/Analysis/Analyses/ThreadSafety.h"
38#include "clang/Analysis/Analyses/UninitializedValues.h"
39#include "clang/Analysis/Analyses/UnsafeBufferUsage.h"
40#include "clang/Analysis/AnalysisDeclContext.h"
41#include "clang/Analysis/CFG.h"
42#include "clang/Analysis/CallGraph.h"
43#include "clang/Analysis/FlowSensitive/DataflowWorklist.h"
44#include "clang/Basic/Diagnostic.h"
45#include "clang/Basic/DiagnosticSema.h"
46#include "clang/Basic/SourceLocation.h"
47#include "clang/Basic/SourceManager.h"
48#include "clang/Lex/Preprocessor.h"
49#include "clang/Sema/ScopeInfo.h"
50#include "clang/Sema/SemaInternal.h"
51#include "llvm/ADT/ArrayRef.h"
52#include "llvm/ADT/BitVector.h"
53#include "llvm/ADT/DenseMap.h"
54#include "llvm/ADT/MapVector.h"
55#include "llvm/ADT/PostOrderIterator.h"
56#include "llvm/ADT/STLFunctionalExtras.h"
57#include "llvm/ADT/SmallVector.h"
58#include "llvm/ADT/StringRef.h"
59#include "llvm/Support/Debug.h"
60#include "llvm/Support/TimeProfiler.h"
61#include <algorithm>
62#include <deque>
63#include <iterator>
64#include <optional>
65
66using namespace clang;
67
68//===----------------------------------------------------------------------===//
69// Unreachable code analysis.
70//===----------------------------------------------------------------------===//
71
72namespace {
  /// Receives reports from the reachable-code analysis and turns them into
  /// -Wunreachable-code diagnostics, attaching a fix-it that shows how to
  /// silence the warning when the dead code is guarded by a known-value
  /// condition.
  class UnreachableCodeHandler : public reachable_code::Callback {
    Sema &S;
    /// Silenceable-condition range of the previous report; used to collapse
    /// multiple reports triggered by the same conditional value.
    SourceRange PreviousSilenceableCondVal;

  public:
    UnreachableCodeHandler(Sema &s) : S(s) {}

    void HandleUnreachable(reachable_code::UnreachableKind UK, SourceLocation L,
                           SourceRange SilenceableCondVal, SourceRange R1,
                           SourceRange R2, bool HasFallThroughAttr) override {
      // If the diagnosed code is `[[fallthrough]];` and
      // `-Wunreachable-code-fallthrough` is enabled, suppress `code will never
      // be executed` warning to avoid generating diagnostic twice
      if (HasFallThroughAttr &&
          !S.getDiagnostics().isIgnored(DiagID: diag::warn_unreachable_fallthrough_attr,
                                        Loc: SourceLocation()))
        return;

      // Avoid reporting multiple unreachable code diagnostics that are
      // triggered by the same conditional value.
      if (PreviousSilenceableCondVal.isValid() &&
          SilenceableCondVal.isValid() &&
          PreviousSilenceableCondVal == SilenceableCondVal)
        return;
      PreviousSilenceableCondVal = SilenceableCondVal;

      // Pick the diagnostic specialized for the kind of unreachable construct.
      unsigned diag = diag::warn_unreachable;
      switch (UK) {
        case reachable_code::UK_Break:
          diag = diag::warn_unreachable_break;
          break;
        case reachable_code::UK_Return:
          diag = diag::warn_unreachable_return;
          break;
        case reachable_code::UK_Loop_Increment:
          diag = diag::warn_unreachable_loop_increment;
          break;
        case reachable_code::UK_Other:
          break;
      }

      S.Diag(Loc: L, DiagID: diag) << R1 << R2;

      // If a silenceable condition range was reported, emit a note with a
      // fix-it wrapping the condition in "/* DISABLES CODE */ ( ... )".
      SourceLocation Open = SilenceableCondVal.getBegin();
      if (Open.isValid()) {
        SourceLocation Close = SilenceableCondVal.getEnd();
        Close = S.getLocForEndOfToken(Loc: Close);
        if (Close.isValid()) {
          S.Diag(Loc: Open, DiagID: diag::note_unreachable_silence)
            << FixItHint::CreateInsertion(InsertionLoc: Open, Code: "/* DISABLES CODE */ (")
            << FixItHint::CreateInsertion(InsertionLoc: Close, Code: ")");
        }
      }
    }
  };
128} // anonymous namespace
129
130/// CheckUnreachable - Check for unreachable code.
131static void CheckUnreachable(Sema &S, AnalysisDeclContext &AC) {
132 // As a heuristic prune all diagnostics not in the main file. Currently
133 // the majority of warnings in headers are false positives. These
134 // are largely caused by configuration state, e.g. preprocessor
135 // defined code, etc.
136 //
137 // Note that this is also a performance optimization. Analyzing
138 // headers many times can be expensive.
139 if (!S.getSourceManager().isInMainFile(Loc: AC.getDecl()->getBeginLoc()))
140 return;
141
142 UnreachableCodeHandler UC(S);
143 reachable_code::FindUnreachableCode(AC, PP&: S.getPreprocessor(), CB&: UC);
144}
145
146namespace {
147/// Warn on logical operator errors in CFGBuilder
/// Warn on logical operator errors in CFGBuilder
class LogicalErrorHandler : public CFGCallback {
  Sema &S;

public:
  LogicalErrorHandler(Sema &S) : S(S) {}

  /// Returns true if E, or any subexpression of E, was expanded from a
  /// macro. The warnings below are suppressed in that case, since
  /// macro-generated code commonly contains intentional tautologies.
  static bool HasMacroID(const Expr *E) {
    if (E->getExprLoc().isMacroID())
      return true;

    // Recurse to children.
    for (const Stmt *SubStmt : E->children())
      if (const Expr *SubExpr = dyn_cast_or_null<Expr>(Val: SubStmt))
        if (HasMacroID(E: SubExpr))
          return true;

    return false;
  }

  /// Diagnose a negation/comparison combination that is always true (||)
  /// or always false (&&).
  void logicAlwaysTrue(const BinaryOperator *B, bool isAlwaysTrue) override {
    if (HasMacroID(E: B))
      return;

    unsigned DiagID = isAlwaysTrue
                          ? diag::warn_tautological_negation_or_compare
                          : diag::warn_tautological_negation_and_compare;
    SourceRange DiagRange = B->getSourceRange();
    S.Diag(Loc: B->getExprLoc(), DiagID) << DiagRange;
  }

  /// Diagnose overlapping comparisons whose combined result is constant.
  void compareAlwaysTrue(const BinaryOperator *B,
                         bool isAlwaysTrueOrFalse) override {
    if (HasMacroID(E: B))
      return;

    SourceRange DiagRange = B->getSourceRange();
    S.Diag(Loc: B->getExprLoc(), DiagID: diag::warn_tautological_overlap_comparison)
        << DiagRange << isAlwaysTrueOrFalse;
  }

  /// Diagnose a bitwise comparison with a constant result.
  void compareBitwiseEquality(const BinaryOperator *B,
                              bool isAlwaysTrue) override {
    if (HasMacroID(E: B))
      return;

    SourceRange DiagRange = B->getSourceRange();
    S.Diag(Loc: B->getExprLoc(), DiagID: diag::warn_comparison_bitwise_always)
        << DiagRange << isAlwaysTrue;
  }

  /// Diagnose a suspicious bitwise-or in a comparison.
  void compareBitwiseOr(const BinaryOperator *B) override {
    if (HasMacroID(E: B))
      return;

    SourceRange DiagRange = B->getSourceRange();
    S.Diag(Loc: B->getExprLoc(), DiagID: diag::warn_comparison_bitwise_or) << DiagRange;
  }

  /// Returns true if at least one of the warnings this handler emits is
  /// enabled at Loc, so callers can skip the extra CFG work otherwise.
  /// NOTE(review): warn_comparison_bitwise_always is not consulted here —
  /// confirm whether that omission is intentional.
  static bool hasActiveDiagnostics(DiagnosticsEngine &Diags,
                                   SourceLocation Loc) {
    return !Diags.isIgnored(DiagID: diag::warn_tautological_overlap_comparison, Loc) ||
           !Diags.isIgnored(DiagID: diag::warn_comparison_bitwise_or, Loc) ||
           !Diags.isIgnored(DiagID: diag::warn_tautological_negation_and_compare, Loc);
  }
};
213} // anonymous namespace
214
215//===----------------------------------------------------------------------===//
216// Check for infinite self-recursion in functions
217//===----------------------------------------------------------------------===//
218
219// Returns true if the function is called anywhere within the CFGBlock.
// For member functions, an additional condition is required: the call must
// be made through the 'this' pointer.
static bool hasRecursiveCallInPath(const FunctionDecl *FD, CFGBlock &Block) {
  // Process all the Stmt's in this block to find any calls to FD.
  for (const auto &B : Block) {
    if (B.getKind() != CFGElement::Statement)
      continue;

    // Only calls whose callee's canonical declaration is FD are of interest.
    const CallExpr *CE = dyn_cast<CallExpr>(Val: B.getAs<CFGStmt>()->getStmt());
    if (!CE || !CE->getCalleeDecl() ||
        CE->getCalleeDecl()->getCanonicalDecl() != FD)
      continue;

    // Skip function calls which are qualified with a templated class.
    if (const DeclRefExpr *DRE =
            dyn_cast<DeclRefExpr>(Val: CE->getCallee()->IgnoreParenImpCasts()))
      if (NestedNameSpecifier NNS = DRE->getQualifier();
          NNS.getKind() == NestedNameSpecifier::Kind::Type)
        if (isa_and_nonnull<TemplateSpecializationType>(Val: NNS.getAsType()))
          continue;

    // Count the call as recursive unless it is a virtual member call on an
    // object other than 'this' — such a call may dispatch to an override
    // rather than back into FD.
    const CXXMemberCallExpr *MCE = dyn_cast<CXXMemberCallExpr>(Val: CE);
    if (!MCE || isa<CXXThisExpr>(Val: MCE->getImplicitObjectArgument()) ||
        !MCE->getMethodDecl()->isVirtual())
      return true;
  }
  return false;
}
248
249// Returns true if every path from the entry block passes through a call to FD.
250static bool checkForRecursiveFunctionCall(const FunctionDecl *FD, CFG *cfg) {
251 llvm::SmallPtrSet<CFGBlock *, 16> Visited;
252 llvm::SmallVector<CFGBlock *, 16> WorkList;
253 // Keep track of whether we found at least one recursive path.
254 bool foundRecursion = false;
255
256 const unsigned ExitID = cfg->getExit().getBlockID();
257
258 // Seed the work list with the entry block.
259 WorkList.push_back(Elt: &cfg->getEntry());
260
261 while (!WorkList.empty()) {
262 CFGBlock *Block = WorkList.pop_back_val();
263
264 for (auto I = Block->succ_begin(), E = Block->succ_end(); I != E; ++I) {
265 if (CFGBlock *SuccBlock = *I) {
266 if (!Visited.insert(Ptr: SuccBlock).second)
267 continue;
268
269 // Found a path to the exit node without a recursive call.
270 if (ExitID == SuccBlock->getBlockID())
271 return false;
272
273 // If the successor block contains a recursive call, end analysis there.
274 if (hasRecursiveCallInPath(FD, Block&: *SuccBlock)) {
275 foundRecursion = true;
276 continue;
277 }
278
279 WorkList.push_back(Elt: SuccBlock);
280 }
281 }
282 }
283 return foundRecursion;
284}
285
286static void checkRecursiveFunction(Sema &S, const FunctionDecl *FD,
287 const Stmt *Body, AnalysisDeclContext &AC) {
288 FD = FD->getCanonicalDecl();
289
290 // Only run on non-templated functions and non-templated members of
291 // templated classes.
292 if (FD->getTemplatedKind() != FunctionDecl::TK_NonTemplate &&
293 FD->getTemplatedKind() != FunctionDecl::TK_MemberSpecialization)
294 return;
295
296 CFG *cfg = AC.getCFG();
297 if (!cfg) return;
298
299 // If the exit block is unreachable, skip processing the function.
300 if (cfg->getExit().pred_empty())
301 return;
302
303 // Emit diagnostic if a recursive function call is detected for all paths.
304 if (checkForRecursiveFunctionCall(FD, cfg))
305 S.Diag(Loc: Body->getBeginLoc(), DiagID: diag::warn_infinite_recursive_function);
306}
307
308//===----------------------------------------------------------------------===//
309// Check for throw in a non-throwing function.
310//===----------------------------------------------------------------------===//
311
312/// Determine whether an exception thrown by E, unwinding from ThrowBlock,
313/// can reach ExitBlock.
/// Determine whether an exception thrown by E, unwinding from ThrowBlock,
/// can reach ExitBlock.
///
/// Walks the CFG successors from the throwing block; the exception escapes
/// if the exit block is reachable without first hitting a handler that can
/// catch the thrown type.
static bool throwEscapes(Sema &S, const CXXThrowExpr *E, CFGBlock &ThrowBlock,
                         CFG *Body) {
  SmallVector<CFGBlock *, 16> Stack;
  // Blocks already queued for (or finished) processing, indexed by block ID.
  llvm::BitVector Queued(Body->getNumBlockIDs());

  Stack.push_back(Elt: &ThrowBlock);
  Queued[ThrowBlock.getBlockID()] = true;

  while (!Stack.empty()) {
    CFGBlock &UnwindBlock = *Stack.pop_back_val();

    for (auto &Succ : UnwindBlock.succs()) {
      if (!Succ.isReachable() || Queued[Succ->getBlockID()])
        continue;

      // Reaching the exit block means the exception leaves the function.
      if (Succ->getBlockID() == Body->getExit().getBlockID())
        return true;

      if (auto *Catch =
              dyn_cast_or_null<CXXCatchStmt>(Val: Succ->getLabel())) {
        QualType Caught = Catch->getCaughtType();
        if (Caught.isNull() || // catch (...) catches everything
            !E->getSubExpr() || // throw; is considered caught by any handler
            S.handlerCanCatch(HandlerType: Caught, ExceptionType: E->getSubExpr()->getType()))
          // Exception doesn't escape via this path.
          break;
      } else {
        Stack.push_back(Elt: Succ);
        Queued[Succ->getBlockID()] = true;
      }
    }
  }

  return false;
}
349
350static void visitReachableThrows(
351 CFG *BodyCFG,
352 llvm::function_ref<void(const CXXThrowExpr *, CFGBlock &)> Visit) {
353 llvm::BitVector Reachable(BodyCFG->getNumBlockIDs());
354 clang::reachable_code::ScanReachableFromBlock(Start: &BodyCFG->getEntry(), Reachable);
355 for (CFGBlock *B : *BodyCFG) {
356 if (!Reachable[B->getBlockID()])
357 continue;
358 for (CFGElement &E : *B) {
359 std::optional<CFGStmt> S = E.getAs<CFGStmt>();
360 if (!S)
361 continue;
362 if (auto *Throw = dyn_cast<CXXThrowExpr>(Val: S->getStmt()))
363 Visit(Throw, *B);
364 }
365 }
366}
367
/// Emit the -Wexceptions diagnostic for a throw that can escape a
/// non-throwing function, plus a note pointing at the function's exception
/// specification. System-header throw sites are not diagnosed.
static void EmitDiagForCXXThrowInNonThrowingFunc(Sema &S, SourceLocation OpLoc,
                                                 const FunctionDecl *FD) {
  if (!S.getSourceManager().isInSystemHeader(Loc: OpLoc) &&
      FD->getTypeSourceInfo()) {
    S.Diag(Loc: OpLoc, DiagID: diag::warn_throw_in_noexcept_func) << FD;
    // Destructors and (array) operator delete are implicitly noexcept in
    // C++11, so they get a dedicated note; everything else gets the generic
    // one.
    if (S.getLangOpts().CPlusPlus11 &&
        (isa<CXXDestructorDecl>(Val: FD) ||
         FD->getDeclName().getCXXOverloadedOperator() == OO_Delete ||
         FD->getDeclName().getCXXOverloadedOperator() == OO_Array_Delete)) {
      if (const auto *Ty = FD->getTypeSourceInfo()->getType()->
                                         getAs<FunctionProtoType>())
        S.Diag(Loc: FD->getLocation(), DiagID: diag::note_throw_in_dtor)
            << !isa<CXXDestructorDecl>(Val: FD) << !Ty->hasExceptionSpec()
            << FD->getExceptionSpecSourceRange();
    } else
      S.Diag(Loc: FD->getLocation(), DiagID: diag::note_throw_in_function)
          << FD->getExceptionSpecSourceRange();
  }
}
387
388static void checkThrowInNonThrowingFunc(Sema &S, const FunctionDecl *FD,
389 AnalysisDeclContext &AC) {
390 CFG *BodyCFG = AC.getCFG();
391 if (!BodyCFG)
392 return;
393 if (BodyCFG->getExit().pred_empty())
394 return;
395 visitReachableThrows(BodyCFG, Visit: [&](const CXXThrowExpr *Throw, CFGBlock &Block) {
396 if (throwEscapes(S, E: Throw, ThrowBlock&: Block, Body: BodyCFG))
397 EmitDiagForCXXThrowInNonThrowingFunc(S, OpLoc: Throw->getThrowLoc(), FD);
398 });
399}
400
401static bool isNoexcept(const FunctionDecl *FD) {
402 const auto *FPT = FD->getType()->castAs<FunctionProtoType>();
403 if (FPT->isNothrow() || FD->hasAttr<NoThrowAttr>())
404 return true;
405 return false;
406}
407
408/// Checks if the given expression is a reference to a function with
409/// 'noreturn' attribute.
410static bool isReferenceToNoReturn(const Expr *E) {
411 if (auto *DRef = dyn_cast<DeclRefExpr>(Val: E->IgnoreParenCasts()))
412 if (auto *FD = dyn_cast<FunctionDecl>(Val: DRef->getDecl()))
413 return FD->isNoReturn();
414 return false;
415}
416
417/// Checks if the given variable, which is assumed to be a function pointer, is
418/// initialized with a function having 'noreturn' attribute.
419static bool isInitializedWithNoReturn(const VarDecl *VD) {
420 if (const Expr *Init = VD->getInit()) {
421 if (auto *ListInit = dyn_cast<InitListExpr>(Val: Init);
422 ListInit && ListInit->getNumInits() > 0)
423 Init = ListInit->getInit(Init: 0);
424 return isReferenceToNoReturn(E: Init);
425 }
426 return false;
427}
428
429namespace {
430
/// Looks for statements that can define the value of the given variable.
/// Looks for statements, that can define value of the given variable.
///
/// After visiting one statement, AllValuesAreNoReturn is:
///   - std::nullopt : the statement does not affect Var's value;
///   - true         : Var was (re)defined with a 'noreturn' function;
///   - false        : Var may now hold a non-noreturn value (assigned a
///                    non-noreturn function, had its address taken, or was
///                    passed as a mutable argument).
struct TransferFunctions : public StmtVisitor<TransferFunctions> {
  const VarDecl *Var;
  std::optional<bool> AllValuesAreNoReturn;

  TransferFunctions(const VarDecl *VD) : Var(VD) {}

  /// Clear the result before visiting the next statement.
  void reset() { AllValuesAreNoReturn = std::nullopt; }

  // A declaration of Var defines it via its initializer.
  void VisitDeclStmt(DeclStmt *DS) {
    for (auto *DI : DS->decls())
      if (auto *VD = dyn_cast<VarDecl>(Val: DI))
        if (VarDecl *Def = VD->getDefinition())
          if (Def == Var)
            AllValuesAreNoReturn = isInitializedWithNoReturn(VD: Def);
  }

  // Taking Var's address means it may be modified through the pointer, so
  // conservatively assume a non-noreturn value.
  void VisitUnaryOperator(UnaryOperator *UO) {
    if (UO->getOpcode() == UO_AddrOf) {
      if (auto *DRef =
              dyn_cast<DeclRefExpr>(Val: UO->getSubExpr()->IgnoreParenCasts()))
        if (DRef->getDecl() == Var)
          AllValuesAreNoReturn = false;
    }
  }

  // Assignment to Var redefines it with the right-hand side.
  void VisitBinaryOperator(BinaryOperator *BO) {
    if (BO->getOpcode() == BO_Assign)
      if (auto *DRef = dyn_cast<DeclRefExpr>(Val: BO->getLHS()->IgnoreParenCasts()))
        if (DRef->getDecl() == Var)
          AllValuesAreNoReturn = isReferenceToNoReturn(E: BO->getRHS());
  }

  // Passing Var as a non-const glvalue argument lets the callee modify it,
  // so conservatively assume a non-noreturn value.
  void VisitCallExpr(CallExpr *CE) {
    for (CallExpr::arg_iterator I = CE->arg_begin(), E = CE->arg_end(); I != E;
         ++I) {
      const Expr *Arg = *I;
      if (Arg->isGLValue() && !Arg->getType().isConstQualified())
        if (auto *DRef = dyn_cast<DeclRefExpr>(Val: Arg->IgnoreParenCasts()))
          if (auto VD = dyn_cast<VarDecl>(Val: DRef->getDecl()))
            if (VD->getDefinition() == Var)
              AllValuesAreNoReturn = false;
    }
  }
};
476} // namespace
477
478// Checks if all possible values of the given variable are functions with
479// 'noreturn' attribute.
static bool areAllValuesNoReturn(const VarDecl *VD, const CFGBlock &VarBlk,
                                 AnalysisDeclContext &AC) {
  // The set of possible values of a constant variable is determined by
  // its initializer, unless it is a function parameter.
  if (!isa<ParmVarDecl>(Val: VD) && VD->getType().isConstant(Ctx: AC.getASTContext())) {
    if (const VarDecl *Def = VD->getDefinition())
      return isInitializedWithNoReturn(VD: Def);
    return false;
  }

  // In multithreaded environment the value of a global variable may be changed
  // asynchronously.
  if (!VD->getDeclContext()->isFunctionOrMethod())
    return false;

  // Check the condition "all values are noreturn". It is satisfied if the
  // variable is set to "noreturn" value in the current block or all its
  // predecessors satisfies the condition.
  //
  // BlocksToCheck maps each pending block to its known status: true when the
  // block was shown to define VD with a noreturn value, nullopt while it is
  // still undetermined.
  using MapTy = llvm::DenseMap<const CFGBlock *, std::optional<bool>>;
  using ValueTy = MapTy::value_type;
  MapTy BlocksToCheck;
  BlocksToCheck[&VarBlk] = std::nullopt;
  const auto BlockSatisfiesCondition = [](ValueTy Item) {
    return Item.getSecond().value_or(u: false);
  };

  // Backward dataflow traversal starting at VarBlk.
  TransferFunctions TF(VD);
  BackwardDataflowWorklist Worklist(*AC.getCFG(), AC);
  llvm::DenseSet<const CFGBlock *> Visited;
  Worklist.enqueueBlock(Block: &VarBlk);
  while (const CFGBlock *B = Worklist.dequeue()) {
    if (Visited.contains(V: B))
      continue;
    Visited.insert(V: B);
    // First check the current block. Statements are scanned in reverse so
    // the last definition of VD in the block is the one that counts.
    for (CFGBlock::const_reverse_iterator ri = B->rbegin(), re = B->rend();
         ri != re; ++ri) {
      if (std::optional<CFGStmt> cs = ri->getAs<CFGStmt>()) {
        const Stmt *S = cs->getStmt();
        TF.reset();
        TF.Visit(S: const_cast<Stmt *>(S));
        if (TF.AllValuesAreNoReturn) {
          // A single possibly-non-noreturn value disproves the property.
          if (!TF.AllValuesAreNoReturn.value())
            return false;
          BlocksToCheck[B] = true;
          break;
        }
      }
    }

    // If all checked blocks satisfy the condition, the check is finished.
    if (llvm::all_of(Range&: BlocksToCheck, P: BlockSatisfiesCondition))
      return true;

    // If this block does not contain the variable definition, check
    // its predecessors.
    if (!BlocksToCheck[B]) {
      Worklist.enqueuePredecessors(Block: B);
      BlocksToCheck.erase(Val: B);
      for (const auto &PredBlk : B->preds())
        if (!BlocksToCheck.contains(Val: PredBlk))
          BlocksToCheck[PredBlk] = std::nullopt;
    }
  }

  return false;
}
547
548//===----------------------------------------------------------------------===//
549// Check for missing return value.
550//===----------------------------------------------------------------------===//
551
/// Classification of how control may leave the end of a statement, as
/// computed by CheckFallThrough() below.
enum ControlFlowKind {
  UnknownFallThrough,      ///< No CFG was available; nothing can be said.
  NeverFallThrough,        ///< Never falls off the end, but may return.
  MaybeFallThrough,        ///< Might or might not fall off the end.
  AlwaysFallThrough,       ///< Always falls off the end.
  NeverFallThroughOrReturn ///< Never falls off the end and never returns.
};
559
560/// CheckFallThrough - Check that we don't fall off the end of a
561/// Statement that should return a value.
562///
563/// \returns AlwaysFallThrough iff we always fall off the end of the statement,
564/// MaybeFallThrough iff we might or might not fall off the end,
565/// NeverFallThroughOrReturn iff we never fall off the end of the statement or
566/// return. We assume NeverFallThrough iff we never fall off the end of the
567/// statement but we may return. We assume that functions not marked noreturn
568/// will return.
/// CheckFallThrough - Check that we don't fall off the end of a
/// Statement that should return a value.
///
/// \returns AlwaysFallThrough iff we always fall off the end of the statement,
/// MaybeFallThrough iff we might or might not fall off the end,
/// NeverFallThroughOrReturn iff we never fall off the end of the statement or
/// return. We assume NeverFallThrough iff we never fall off the end of the
/// statement but we may return. We assume that functions not marked noreturn
/// will return.
static ControlFlowKind CheckFallThrough(AnalysisDeclContext &AC) {
  CFG *cfg = AC.getCFG();
  if (!cfg) return UnknownFallThrough;

  // The CFG leaves in dead things, and we don't want the dead code paths to
  // confuse us, so we mark all live things first.
  llvm::BitVector live(cfg->getNumBlockIDs());
  reachable_code::ScanReachableFromBlock(Start: &cfg->getEntry(), Reachable&: live);

  // Now we know what is live, we check the live predecessors of the exit block
  // and look for fall through paths, being careful to ignore normal returns,
  // and exceptional paths.
  bool HasLiveReturn = false;   // A reachable 'return'/'co_return' exists.
  bool HasFakeEdge = false;     // Edge from a throw / MS asm statement.
  bool HasPlainEdge = false;    // Genuine fall-off-the-end edge.
  bool HasAbnormalEdge = false; // Edge from noreturn code, try, etc.

  // Ignore default cases that aren't likely to be reachable because all
  // enums in a switch(X) have explicit case statements.
  CFGBlock::FilterOptions FO;
  FO.IgnoreDefaultsWithCoveredEnums = 1;

  for (CFGBlock::filtered_pred_iterator I =
           cfg->getExit().filtered_pred_start_end(f: FO);
       I.hasMore(); ++I) {
    const CFGBlock &B = **I;
    if (!live[B.getBlockID()])
      continue;

    // Skip blocks which contain an element marked as no-return. They don't
    // represent actually viable edges into the exit block, so mark them as
    // abnormal.
    if (B.hasNoReturnElement()) {
      HasAbnormalEdge = true;
      continue;
    }

    // Destructors can appear after the 'return' in the CFG. This is
    // normal. We need to look pass the destructors for the return
    // statement (if it exists).
    CFGBlock::const_reverse_iterator ri = B.rbegin(), re = B.rend();

    for ( ; ri != re ; ++ri)
      if (ri->getAs<CFGStmt>())
        break;

    // No more CFGElements in the block?
    if (ri == re) {
      const Stmt *Term = B.getTerminatorStmt();
      if (Term && (isa<CXXTryStmt>(Val: Term) || isa<ObjCAtTryStmt>(Val: Term))) {
        HasAbnormalEdge = true;
        continue;
      }
      // A labeled empty statement, or the entry block...
      HasPlainEdge = true;
      continue;
    }

    // Classify the last statement of the block.
    CFGStmt CS = ri->castAs<CFGStmt>();
    const Stmt *S = CS.getStmt();
    if (isa<ReturnStmt>(Val: S) || isa<CoreturnStmt>(Val: S)) {
      HasLiveReturn = true;
      continue;
    }
    if (isa<ObjCAtThrowStmt>(Val: S)) {
      HasFakeEdge = true;
      continue;
    }
    if (isa<CXXThrowExpr>(Val: S)) {
      HasFakeEdge = true;
      continue;
    }
    if (isa<MSAsmStmt>(Val: S)) {
      // TODO: Verify this is correct.
      HasFakeEdge = true;
      HasLiveReturn = true;
      continue;
    }
    if (isa<CXXTryStmt>(Val: S)) {
      HasAbnormalEdge = true;
      continue;
    }
    if (!llvm::is_contained(Range: B.succs(), Element: &cfg->getExit())) {
      HasAbnormalEdge = true;
      continue;
    }
    // An indirect call through a function pointer whose every possible
    // value is a noreturn function is treated like a noreturn call.
    if (auto *Call = dyn_cast<CallExpr>(Val: S)) {
      const Expr *Callee = Call->getCallee();
      if (Callee->getType()->isPointerType())
        if (auto *DeclRef =
                dyn_cast<DeclRefExpr>(Val: Callee->IgnoreParenImpCasts()))
          if (auto *VD = dyn_cast<VarDecl>(Val: DeclRef->getDecl()))
            if (areAllValuesNoReturn(VD, VarBlk: B, AC)) {
              HasAbnormalEdge = true;
              continue;
            }
    }

    HasPlainEdge = true;
  }
  // Combine the edge kinds into the final verdict.
  if (!HasPlainEdge) {
    if (HasLiveReturn)
      return NeverFallThrough;
    return NeverFallThroughOrReturn;
  }
  if (HasAbnormalEdge || HasFakeEdge || HasLiveReturn)
    return MaybeFallThrough;
  // This says AlwaysFallThrough for calls to functions that are not marked
  // noreturn, that don't return. If people would like this warning to be more
  // accurate, such functions should be marked as noreturn.
  return AlwaysFallThrough;
}
681
682namespace {
683
/// Bundles the diagnostic IDs to use for a fall-through check, chosen per
/// kind of body being analyzed (function, coroutine, block, lambda).
/// A diag ID of 0 means "do not emit that diagnostic".
struct CheckFallThroughDiagnostics {
  unsigned diag_FallThrough_HasNoReturn = 0;
  unsigned diag_FallThrough_ReturnsNonVoid = 0;
  unsigned diag_NeverFallThroughOrReturn = 0;
  unsigned FunKind; // TODO: use diag::FalloffFunctionKind
  SourceLocation FuncLoc;

  static CheckFallThroughDiagnostics MakeForFunction(Sema &S,
                                                     const Decl *Func) {
    CheckFallThroughDiagnostics D;
    D.FuncLoc = Func->getLocation();
    D.diag_FallThrough_HasNoReturn = diag::warn_noreturn_has_return_expr;
    D.diag_FallThrough_ReturnsNonVoid = diag::warn_falloff_nonvoid;

    // Don't suggest that virtual functions be marked "noreturn", since they
    // might be overridden by non-noreturn functions.
    bool isVirtualMethod = false;
    if (const CXXMethodDecl *Method = dyn_cast<CXXMethodDecl>(Val: Func))
      isVirtualMethod = Method->isVirtual();

    // Don't suggest that template instantiations be marked "noreturn"
    bool isTemplateInstantiation = false;
    if (const FunctionDecl *Function = dyn_cast<FunctionDecl>(Val: Func)) {
      isTemplateInstantiation = Function->isTemplateInstantiation();
      // Falling off the end of main() is only an extension warning in
      // pre-C99 C modes.
      if (!S.getLangOpts().CPlusPlus && !S.getLangOpts().C99 &&
          Function->isMain()) {
        D.diag_FallThrough_ReturnsNonVoid = diag::ext_main_no_return;
      }
    }

    if (!isVirtualMethod && !isTemplateInstantiation)
      D.diag_NeverFallThroughOrReturn = diag::warn_suggest_noreturn_function;

    D.FunKind = diag::FalloffFunctionKind::Function;
    return D;
  }

  static CheckFallThroughDiagnostics MakeForCoroutine(const Decl *Func) {
    CheckFallThroughDiagnostics D;
    D.FuncLoc = Func->getLocation();
    D.diag_FallThrough_ReturnsNonVoid = diag::warn_falloff_nonvoid;
    D.FunKind = diag::FalloffFunctionKind::Coroutine;
    return D;
  }

  static CheckFallThroughDiagnostics MakeForBlock() {
    CheckFallThroughDiagnostics D;
    D.diag_FallThrough_HasNoReturn = diag::err_noreturn_has_return_expr;
    D.diag_FallThrough_ReturnsNonVoid = diag::err_falloff_nonvoid;
    D.FunKind = diag::FalloffFunctionKind::Block;
    return D;
  }

  static CheckFallThroughDiagnostics MakeForLambda() {
    CheckFallThroughDiagnostics D;
    D.diag_FallThrough_HasNoReturn = diag::err_noreturn_has_return_expr;
    D.diag_FallThrough_ReturnsNonVoid = diag::warn_falloff_nonvoid;
    D.FunKind = diag::FalloffFunctionKind::Lambda;
    return D;
  }

  /// Returns true when every diagnostic this configuration could produce is
  /// either inapplicable or disabled at FuncLoc, so the (possibly expensive)
  /// fall-through analysis can be skipped entirely.
  bool checkDiagnostics(DiagnosticsEngine &D, bool ReturnsVoid,
                        bool HasNoReturn) const {
    if (FunKind == diag::FalloffFunctionKind::Function) {
      return (ReturnsVoid ||
              D.isIgnored(DiagID: diag::warn_falloff_nonvoid, Loc: FuncLoc)) &&
             (!HasNoReturn ||
              D.isIgnored(DiagID: diag::warn_noreturn_has_return_expr, Loc: FuncLoc)) &&
             (!ReturnsVoid ||
              D.isIgnored(DiagID: diag::warn_suggest_noreturn_block, Loc: FuncLoc));
    }
    if (FunKind == diag::FalloffFunctionKind::Coroutine) {
      return (ReturnsVoid ||
              D.isIgnored(DiagID: diag::warn_falloff_nonvoid, Loc: FuncLoc)) &&
             (!HasNoReturn);
    }
    // For blocks / lambdas.
    return ReturnsVoid && !HasNoReturn;
  }
};
764
765} // anonymous namespace
766
767/// CheckFallThroughForBody - Check that we don't fall off the end of a
768/// function that should return a value. Check that we don't fall off the end
769/// of a noreturn function. We assume that functions and blocks not marked
770/// noreturn will return.
/// CheckFallThroughForBody - Check that we don't fall off the end of a
/// function that should return a value. Check that we don't fall off the end
/// of a noreturn function. We assume that functions and blocks not marked
/// noreturn will return.
///
/// \param BlockType only consulted when D is a BlockDecl, to recover the
///        block's return type and noreturn-ness from its pointer type.
static void CheckFallThroughForBody(Sema &S, const Decl *D, const Stmt *Body,
                                    QualType BlockType,
                                    const CheckFallThroughDiagnostics &CD,
                                    AnalysisDeclContext &AC) {

  bool ReturnsVoid = false;
  bool HasNoReturn = false;

  // Derive ReturnsVoid / HasNoReturn from the kind of declaration.
  if (const auto *FD = dyn_cast<FunctionDecl>(Val: D)) {
    // For a coroutine, presence of a fallthrough handler is what makes
    // falling off the end well-formed.
    if (const auto *CBody = dyn_cast<CoroutineBodyStmt>(Val: Body))
      ReturnsVoid = CBody->getFallthroughHandler() != nullptr;
    else
      ReturnsVoid = FD->getReturnType()->isVoidType();
    HasNoReturn = FD->isNoReturn() || FD->hasAttr<InferredNoReturnAttr>();
  }
  else if (const auto *MD = dyn_cast<ObjCMethodDecl>(Val: D)) {
    ReturnsVoid = MD->getReturnType()->isVoidType();
    HasNoReturn = MD->hasAttr<NoReturnAttr>();
  }
  else if (isa<BlockDecl>(Val: D)) {
    if (const FunctionType *FT =
          BlockType->getPointeeType()->getAs<FunctionType>()) {
      if (FT->getReturnType()->isVoidType())
        ReturnsVoid = true;
      if (FT->getNoReturnAttr())
        HasNoReturn = true;
    }
  }

  DiagnosticsEngine &Diags = S.getDiagnostics();

  // Short circuit for compilation speed.
  if (CD.checkDiagnostics(D&: Diags, ReturnsVoid, HasNoReturn))
      return;
  SourceLocation LBrace = Body->getBeginLoc(), RBrace = Body->getEndLoc();

  // cpu_dispatch functions permit empty function bodies for ICC compatibility.
  if (D->getAsFunction() && D->getAsFunction()->isCPUDispatchMultiVersion())
    return;

  // Either in a function body compound statement, or a function-try-block.
  switch (int FallThroughType = CheckFallThrough(AC)) {
    case UnknownFallThrough:
      break;

    case MaybeFallThrough:
    case AlwaysFallThrough:
      if (HasNoReturn) {
        if (CD.diag_FallThrough_HasNoReturn)
          S.Diag(Loc: RBrace, DiagID: CD.diag_FallThrough_HasNoReturn) << CD.FunKind;
      } else if (!ReturnsVoid && CD.diag_FallThrough_ReturnsNonVoid) {
        // If the final statement is a call to an always-throwing function,
        // don't warn about the fall-through.
        if (D->getAsFunction()) {
          if (const auto *CS = dyn_cast<CompoundStmt>(Val: Body);
              CS && !CS->body_empty()) {
            const Stmt *LastStmt = CS->body_back();
            // Unwrap ExprWithCleanups if necessary.
            if (const auto *EWC = dyn_cast<ExprWithCleanups>(Val: LastStmt)) {
              LastStmt = EWC->getSubExpr();
            }
            if (const auto *CE = dyn_cast<CallExpr>(Val: LastStmt)) {
              if (const FunctionDecl *Callee = CE->getDirectCallee();
                  Callee && Callee->hasAttr<InferredNoReturnAttr>()) {
                return; // Don't warn about fall-through.
              }
            }
            // Direct throw.
            if (isa<CXXThrowExpr>(Val: LastStmt)) {
              return; // Don't warn about fall-through.
            }
          }
        }
        // Mention "in some control paths" only for MaybeFallThrough.
        bool NotInAllControlPaths = FallThroughType == MaybeFallThrough;
        S.Diag(Loc: RBrace, DiagID: CD.diag_FallThrough_ReturnsNonVoid)
            << CD.FunKind << NotInAllControlPaths;
      }
      break;
    case NeverFallThroughOrReturn:
      // Suggest marking the function "noreturn" (where applicable).
      if (ReturnsVoid && !HasNoReturn && CD.diag_NeverFallThroughOrReturn) {
        if (const FunctionDecl *FD = dyn_cast<FunctionDecl>(Val: D)) {
          S.Diag(Loc: LBrace, DiagID: CD.diag_NeverFallThroughOrReturn) << 0 << FD;
        } else if (const ObjCMethodDecl *MD = dyn_cast<ObjCMethodDecl>(Val: D)) {
          S.Diag(Loc: LBrace, DiagID: CD.diag_NeverFallThroughOrReturn) << 1 << MD;
        } else {
          S.Diag(Loc: LBrace, DiagID: CD.diag_NeverFallThroughOrReturn);
        }
      }
      break;
    case NeverFallThrough:
      break;
  }
}
864
865//===----------------------------------------------------------------------===//
866// -Wuninitialized
867//===----------------------------------------------------------------------===//
868
869namespace {
870/// ContainsReference - A visitor class to search for references to
871/// a particular declaration (the needle) within any evaluated component of an
872/// expression (recursively).
873class ContainsReference : public ConstEvaluatedExprVisitor<ContainsReference> {
874 bool FoundReference;
875 const DeclRefExpr *Needle;
876
877public:
878 typedef ConstEvaluatedExprVisitor<ContainsReference> Inherited;
879
880 ContainsReference(ASTContext &Context, const DeclRefExpr *Needle)
881 : Inherited(Context), FoundReference(false), Needle(Needle) {}
882
883 void VisitExpr(const Expr *E) {
884 // Stop evaluating if we already have a reference.
885 if (FoundReference)
886 return;
887
888 Inherited::VisitExpr(S: E);
889 }
890
891 void VisitDeclRefExpr(const DeclRefExpr *E) {
892 if (E == Needle)
893 FoundReference = true;
894 else
895 Inherited::VisitDeclRefExpr(E);
896 }
897
898 bool doesContainReference() const { return FoundReference; }
899};
900} // anonymous namespace
901
902static bool SuggestInitializationFixit(Sema &S, const VarDecl *VD) {
903 QualType VariableTy = VD->getType().getCanonicalType();
904 if (VariableTy->isBlockPointerType() &&
905 !VD->hasAttr<BlocksAttr>()) {
906 S.Diag(Loc: VD->getLocation(), DiagID: diag::note_block_var_fixit_add_initialization)
907 << VD->getDeclName()
908 << FixItHint::CreateInsertion(InsertionLoc: VD->getLocation(), Code: "__block ");
909 return true;
910 }
911
912 // Don't issue a fixit if there is already an initializer.
913 if (VD->getInit())
914 return false;
915
916 // Don't suggest a fixit inside macros.
917 if (VD->getEndLoc().isMacroID())
918 return false;
919
920 SourceLocation Loc = S.getLocForEndOfToken(Loc: VD->getEndLoc());
921
922 // Suggest possible initialization (if any).
923 std::string Init = S.getFixItZeroInitializerForType(T: VariableTy, Loc);
924 if (Init.empty())
925 return false;
926
927 S.Diag(Loc, DiagID: diag::note_var_fixit_add_initialization) << VD->getDeclName()
928 << FixItHint::CreateInsertion(InsertionLoc: Loc, Code: Init);
929 return true;
930}
931
932/// Create a fixit to remove an if-like statement, on the assumption that its
933/// condition is CondVal.
934static void CreateIfFixit(Sema &S, const Stmt *If, const Stmt *Then,
935 const Stmt *Else, bool CondVal,
936 FixItHint &Fixit1, FixItHint &Fixit2) {
937 if (CondVal) {
938 // If condition is always true, remove all but the 'then'.
939 Fixit1 = FixItHint::CreateRemoval(
940 RemoveRange: CharSourceRange::getCharRange(B: If->getBeginLoc(), E: Then->getBeginLoc()));
941 if (Else) {
942 SourceLocation ElseKwLoc = S.getLocForEndOfToken(Loc: Then->getEndLoc());
943 Fixit2 =
944 FixItHint::CreateRemoval(RemoveRange: SourceRange(ElseKwLoc, Else->getEndLoc()));
945 }
946 } else {
947 // If condition is always false, remove all but the 'else'.
948 if (Else)
949 Fixit1 = FixItHint::CreateRemoval(RemoveRange: CharSourceRange::getCharRange(
950 B: If->getBeginLoc(), E: Else->getBeginLoc()));
951 else
952 Fixit1 = FixItHint::CreateRemoval(RemoveRange: If->getSourceRange());
953 }
954}
955
/// DiagUninitUse -- Helper function to produce a diagnostic for an
/// uninitialized use of a variable.
///
/// \param VD the variable used while (possibly) uninitialized.
/// \param Use describes the use site and how certain the analysis is.
/// \param IsCapturedByBlock selects the block-capture wording of the
///        diagnostics (%select index in the .td definitions).
static void DiagUninitUse(Sema &S, const VarDecl *VD, const UninitUse &Use,
                          bool IsCapturedByBlock) {
  // Set once any branch-specific diagnostic is emitted below; if no branch
  // could be described, a generic 'may be uninitialized' warning is issued
  // at the end instead.
  bool Diagnosed = false;

  switch (Use.getKind()) {
  case UninitUse::Always:
    S.Diag(Loc: Use.getUser()->getBeginLoc(), DiagID: diag::warn_uninit_var)
        << VD->getDeclName() << IsCapturedByBlock
        << Use.getUser()->getSourceRange();
    return;

  case UninitUse::AfterDecl:
  case UninitUse::AfterCall:
    // Warn at the declaration; the use site gets a note. The 4/5 value is a
    // %select index distinguishing the two kinds in the diagnostic text.
    S.Diag(Loc: VD->getLocation(), DiagID: diag::warn_sometimes_uninit_var)
        << VD->getDeclName() << IsCapturedByBlock
        << (Use.getKind() == UninitUse::AfterDecl ? 4 : 5)
        << VD->getLexicalDeclContext() << VD->getSourceRange();
    S.Diag(Loc: Use.getUser()->getBeginLoc(), DiagID: diag::note_uninit_var_use)
        << IsCapturedByBlock << Use.getUser()->getSourceRange();
    return;

  case UninitUse::Maybe:
  case UninitUse::Sometimes:
    // Carry on to report sometimes-uninitialized branches, if possible,
    // or a 'may be used uninitialized' diagnostic otherwise.
    break;
  }

  // Diagnose each branch which leads to a sometimes-uninitialized use.
  for (UninitUse::branch_iterator I = Use.branch_begin(), E = Use.branch_end();
       I != E; ++I) {
    assert(Use.getKind() == UninitUse::Sometimes);

    const Expr *User = Use.getUser();
    const Stmt *Term = I->Terminator;

    // Information used when building the diagnostic.
    unsigned DiagKind;
    StringRef Str;
    SourceRange Range;

    // FixIts to suppress the diagnostic by removing the dead condition.
    // For all binary terminators, branch 0 is taken if the condition is true,
    // and branch 1 is taken if the condition is false.
    int RemoveDiagKind = -1;
    const char *FixitStr =
        S.getLangOpts().CPlusPlus ? (I->Output ? "true" : "false")
                                  : (I->Output ? "1" : "0");
    FixItHint Fixit1, Fixit2;

    // A null terminator is mapped to DeclStmtClass so it hits 'default'.
    switch (Term ? Term->getStmtClass() : Stmt::DeclStmtClass) {
    default:
      // Don't know how to report this. Just fall back to 'may be used
      // uninitialized'. FIXME: Can this happen?
      continue;

    // "condition is true / condition is false".
    case Stmt::IfStmtClass: {
      const IfStmt *IS = cast<IfStmt>(Val: Term);
      DiagKind = 0;
      Str = "if";
      Range = IS->getCond()->getSourceRange();
      RemoveDiagKind = 0;
      CreateIfFixit(S, If: IS, Then: IS->getThen(), Else: IS->getElse(),
                    CondVal: I->Output, Fixit1, Fixit2);
      break;
    }
    case Stmt::ConditionalOperatorClass: {
      const ConditionalOperator *CO = cast<ConditionalOperator>(Val: Term);
      DiagKind = 0;
      Str = "?:";
      Range = CO->getCond()->getSourceRange();
      RemoveDiagKind = 0;
      CreateIfFixit(S, If: CO, Then: CO->getTrueExpr(), Else: CO->getFalseExpr(),
                    CondVal: I->Output, Fixit1, Fixit2);
      break;
    }
    case Stmt::BinaryOperatorClass: {
      const BinaryOperator *BO = cast<BinaryOperator>(Val: Term);
      // Only && and || create branches worth describing here.
      if (!BO->isLogicalOp())
        continue;
      DiagKind = 0;
      Str = BO->getOpcodeStr();
      Range = BO->getLHS()->getSourceRange();
      RemoveDiagKind = 0;
      if ((BO->getOpcode() == BO_LAnd && I->Output) ||
          (BO->getOpcode() == BO_LOr && !I->Output))
        // true && y -> y, false || y -> y.
        Fixit1 = FixItHint::CreateRemoval(
            RemoveRange: SourceRange(BO->getBeginLoc(), BO->getOperatorLoc()));
      else
        // false && y -> false, true || y -> true.
        Fixit1 = FixItHint::CreateReplacement(RemoveRange: BO->getSourceRange(), Code: FixitStr);
      break;
    }

    // "loop is entered / loop is exited".
    case Stmt::WhileStmtClass:
      DiagKind = 1;
      Str = "while";
      Range = cast<WhileStmt>(Val: Term)->getCond()->getSourceRange();
      RemoveDiagKind = 1;
      Fixit1 = FixItHint::CreateReplacement(RemoveRange: Range, Code: FixitStr);
      break;
    case Stmt::ForStmtClass:
      DiagKind = 1;
      Str = "for";
      Range = cast<ForStmt>(Val: Term)->getCond()->getSourceRange();
      RemoveDiagKind = 1;
      if (I->Output)
        // Loop always entered: drop the condition entirely.
        Fixit1 = FixItHint::CreateRemoval(RemoveRange: Range);
      else
        Fixit1 = FixItHint::CreateReplacement(RemoveRange: Range, Code: FixitStr);
      break;
    case Stmt::CXXForRangeStmtClass:
      if (I->Output == 1) {
        // The use occurs if a range-based for loop's body never executes.
        // That may be impossible, and there's no syntactic fix for this,
        // so treat it as a 'may be uninitialized' case.
        continue;
      }
      DiagKind = 1;
      Str = "for";
      Range = cast<CXXForRangeStmt>(Val: Term)->getRangeInit()->getSourceRange();
      break;

    // "condition is true / loop is exited".
    case Stmt::DoStmtClass:
      DiagKind = 2;
      Str = "do";
      Range = cast<DoStmt>(Val: Term)->getCond()->getSourceRange();
      RemoveDiagKind = 1;
      Fixit1 = FixItHint::CreateReplacement(RemoveRange: Range, Code: FixitStr);
      break;

    // "switch case is taken".
    case Stmt::CaseStmtClass:
      DiagKind = 3;
      Str = "case";
      Range = cast<CaseStmt>(Val: Term)->getLHS()->getSourceRange();
      break;
    case Stmt::DefaultStmtClass:
      DiagKind = 3;
      Str = "default";
      Range = cast<DefaultStmt>(Val: Term)->getDefaultLoc();
      break;
    }

    S.Diag(Loc: Range.getBegin(), DiagID: diag::warn_sometimes_uninit_var)
        << VD->getDeclName() << IsCapturedByBlock << DiagKind
        << Str << I->Output << Range;
    S.Diag(Loc: User->getBeginLoc(), DiagID: diag::note_uninit_var_use)
        << IsCapturedByBlock << User->getSourceRange();
    // Only offer the removal note when a fix-it was actually built above.
    if (RemoveDiagKind != -1)
      S.Diag(Loc: Fixit1.RemoveRange.getBegin(), DiagID: diag::note_uninit_fixit_remove_cond)
          << RemoveDiagKind << Str << I->Output << Fixit1 << Fixit2;

    Diagnosed = true;
  }

  // No branch could be described: fall back to the generic warning.
  if (!Diagnosed)
    S.Diag(Loc: Use.getUser()->getBeginLoc(), DiagID: diag::warn_maybe_uninit_var)
        << VD->getDeclName() << IsCapturedByBlock
        << Use.getUser()->getSourceRange();
}
1123
1124/// Diagnose uninitialized const reference usages.
1125static bool DiagnoseUninitializedConstRefUse(Sema &S, const VarDecl *VD,
1126 const UninitUse &Use) {
1127 S.Diag(Loc: Use.getUser()->getBeginLoc(), DiagID: diag::warn_uninit_const_reference)
1128 << VD->getDeclName() << Use.getUser()->getSourceRange();
1129 return !S.getDiagnostics().isLastDiagnosticIgnored();
1130}
1131
1132/// Diagnose uninitialized const pointer usages.
1133static bool DiagnoseUninitializedConstPtrUse(Sema &S, const VarDecl *VD,
1134 const UninitUse &Use) {
1135 S.Diag(Loc: Use.getUser()->getBeginLoc(), DiagID: diag::warn_uninit_const_pointer)
1136 << VD->getDeclName() << Use.getUser()->getSourceRange();
1137 return !S.getDiagnostics().isLastDiagnosticIgnored();
1138}
1139
/// DiagnoseUninitializedUse -- Helper function for diagnosing uses of an
/// uninitialized variable. This manages the different forms of diagnostic
/// emitted for particular types of uses. Returns true if the use was diagnosed
/// as a warning. If a particular use is one we omit warnings for, returns
/// false.
///
/// \param alwaysReportSelfInit when true, 'int x = x;' style self-init is
///        reported instead of being treated as a deliberate suppression.
static bool DiagnoseUninitializedUse(Sema &S, const VarDecl *VD,
                                     const UninitUse &Use,
                                     bool alwaysReportSelfInit = false) {
  if (const DeclRefExpr *DRE = dyn_cast<DeclRefExpr>(Val: Use.getUser())) {
    // Inspect the initializer of the variable declaration which is
    // being referenced prior to its initialization. We emit
    // specialized diagnostics for self-initialization, and we
    // specifically avoid warning about self references which take the
    // form of:
    //
    //   int x = x;
    //
    // This is used to indicate to GCC that 'x' is intentionally left
    // uninitialized. Proven code paths which access 'x' in
    // an uninitialized state after this will still warn.
    if (const Expr *Initializer = VD->getInit()) {
      if (!alwaysReportSelfInit && DRE == Initializer->IgnoreParenImpCasts())
        return false;

      // The use is buried somewhere inside the initializer expression
      // (e.g. 'int x = x + 1;'): emit the self-reference diagnostic.
      ContainsReference CR(S.Context, DRE);
      CR.Visit(S: Initializer);
      if (CR.doesContainReference()) {
        S.Diag(Loc: DRE->getBeginLoc(), DiagID: diag::warn_uninit_self_reference_in_init)
            << VD->getDeclName() << VD->getLocation() << DRE->getSourceRange();
        return !S.getDiagnostics().isLastDiagnosticIgnored();
      }
    }

    DiagUninitUse(S, VD, Use, IsCapturedByBlock: false);
  } else {
    // The only other use form handled here is a capture by a block.
    const BlockExpr *BE = cast<BlockExpr>(Val: Use.getUser());
    if (VD->getType()->isBlockPointerType() && !VD->hasAttr<BlocksAttr>())
      S.Diag(Loc: BE->getBeginLoc(),
             DiagID: diag::warn_uninit_byref_blockvar_captured_by_block)
          << VD->getDeclName()
          << VD->getType().getQualifiers().hasObjCLifetime();
    else
      DiagUninitUse(S, VD, Use, IsCapturedByBlock: true);
  }

  // Report where the variable was declared when the use wasn't within
  // the initializer of that declaration & we didn't already suggest
  // an initialization fixit.
  if (!SuggestInitializationFixit(S, VD))
    S.Diag(Loc: VD->getBeginLoc(), DiagID: diag::note_var_declared_here)
        << VD->getDeclName();

  return !S.getDiagnostics().isLastDiagnosticIgnored();
}
1194
namespace {
/// Collects [[fallthrough]]-annotated statements in a function body and,
/// given the function's CFG, decides which switch-case fallthroughs are
/// unannotated. Used by DiagnoseSwitchLabelsFallthrough below.
class FallthroughMapper : public DynamicRecursiveASTVisitor {
public:
  FallthroughMapper(Sema &S) : FoundSwitchStatements(false), S(S) {
    // Type traversal is irrelevant for this check; skip it for speed.
    ShouldWalkTypesOfTypeLocs = false;
  }

  /// True if the traversed body contained at least one switch statement.
  bool foundSwitchStatements() const { return FoundSwitchStatements; }

  /// Record that a fallthrough annotation was matched to an actual
  /// fallthrough edge; whatever remains in FallthroughStmts at the end is
  /// invalidly placed.
  void markFallthroughVisited(const AttributedStmt *Stmt) {
    bool Found = FallthroughStmts.erase(Ptr: Stmt);
    assert(Found);
    (void)Found;
  }

  typedef llvm::SmallPtrSet<const AttributedStmt *, 8> AttrStmts;

  /// The annotations that were never matched to a fallthrough edge.
  const AttrStmts &getFallthroughStmts() const { return FallthroughStmts; }

  /// BFS over the CFG to compute the set of reachable blocks, seeded with
  /// the entry block and every case-labelled block.
  void fillReachableBlocks(CFG *Cfg) {
    assert(ReachableBlocks.empty() && "ReachableBlocks already filled");
    std::deque<const CFGBlock *> BlockQueue;

    ReachableBlocks.insert(Ptr: &Cfg->getEntry());
    BlockQueue.push_back(x: &Cfg->getEntry());
    // Mark all case blocks reachable to avoid problems with switching on
    // constants, covered enums, etc.
    // These blocks can contain fall-through annotations, and we don't want to
    // issue a warn_fallthrough_attr_unreachable for them.
    for (const auto *B : *Cfg) {
      const Stmt *L = B->getLabel();
      if (isa_and_nonnull<SwitchCase>(Val: L) && ReachableBlocks.insert(Ptr: B).second)
        BlockQueue.push_back(x: B);
    }

    while (!BlockQueue.empty()) {
      const CFGBlock *P = BlockQueue.front();
      BlockQueue.pop_front();
      for (const CFGBlock *B : P->succs()) {
        if (B && ReachableBlocks.insert(Ptr: B).second)
          BlockQueue.push_back(x: B);
      }
    }
  }

  /// Walk the predecessors of case-labelled block \p B and classify each
  /// incoming edge as annotated or unannotated fallthrough. Returns true if
  /// any predecessor falls through without an annotation; \p AnnotatedCnt is
  /// set to the number of annotated fallthrough edges.
  bool checkFallThroughIntoBlock(const CFGBlock &B, int &AnnotatedCnt,
                                 bool IsTemplateInstantiation) {
    assert(!ReachableBlocks.empty() && "ReachableBlocks empty");

    int UnannotatedCnt = 0;
    AnnotatedCnt = 0;

    std::deque<const CFGBlock *> BlockQueue(B.pred_begin(), B.pred_end());
    while (!BlockQueue.empty()) {
      const CFGBlock *P = BlockQueue.front();
      BlockQueue.pop_front();
      if (!P)
        continue;

      const Stmt *Term = P->getTerminatorStmt();
      if (isa_and_nonnull<SwitchStmt>(Val: Term))
        continue; // Switch statement, good.

      const SwitchCase *SW = dyn_cast_or_null<SwitchCase>(Val: P->getLabel());
      if (SW && SW->getSubStmt() == B.getLabel() && P->begin() == P->end())
        continue; // Previous case label has no statements, good.

      const LabelStmt *L = dyn_cast_or_null<LabelStmt>(Val: P->getLabel());
      if (L && L->getSubStmt() == B.getLabel() && P->begin() == P->end())
        continue; // Case label is preceded with a normal label, good.

      if (!ReachableBlocks.count(Ptr: P)) {
        // Unreachable predecessor: look for a fallthrough annotation buried
        // in its dead statements so it is not reported as misplaced.
        for (const CFGElement &Elem : llvm::reverse(C: *P)) {
          if (std::optional<CFGStmt> CS = Elem.getAs<CFGStmt>()) {
            if (const AttributedStmt *AS = asFallThroughAttr(S: CS->getStmt())) {
              // Don't issue a warning for an unreachable fallthrough
              // attribute in template instantiations as it may not be
              // unreachable in all instantiations of the template.
              if (!IsTemplateInstantiation)
                S.Diag(Loc: AS->getBeginLoc(),
                       DiagID: diag::warn_unreachable_fallthrough_attr);
              markFallthroughVisited(Stmt: AS);
              ++AnnotatedCnt;
              break;
            }
            // Don't care about other unreachable statements.
          }
        }
        // If there are no unreachable statements, this may be a special
        // case in CFG:
        // case X: {
        //    A a;  // A has a destructor.
        //    break;
        // }
        // // <<<< This place is represented by a 'hanging' CFG block.
        // case Y:
        continue;
      }

      const Stmt *LastStmt = getLastStmt(B: *P);
      if (const AttributedStmt *AS = asFallThroughAttr(S: LastStmt)) {
        markFallthroughVisited(Stmt: AS);
        ++AnnotatedCnt;
        continue; // Fallthrough annotation, good.
      }

      if (!LastStmt) { // This block contains no executable statements.
        // Traverse its predecessors.
        std::copy(first: P->pred_begin(), last: P->pred_end(),
                  result: std::back_inserter(x&: BlockQueue));
        continue;
      }

      ++UnannotatedCnt;
    }
    return !!UnannotatedCnt;
  }

  bool VisitAttributedStmt(AttributedStmt *S) override {
    // Collect every fallthrough annotation; matched ones are erased later.
    if (asFallThroughAttr(S))
      FallthroughStmts.insert(Ptr: S);
    return true;
  }

  bool VisitSwitchStmt(SwitchStmt *S) override {
    FoundSwitchStatements = true;
    return true;
  }

  // We don't want to traverse local type declarations. We analyze their
  // methods separately.
  bool TraverseDecl(Decl *D) override { return true; }

  // We analyze lambda bodies separately. Skip them here.
  bool TraverseLambdaExpr(LambdaExpr *LE) override {
    // Traverse the captures, but not the body.
    for (const auto C : zip(t: LE->captures(), u: LE->capture_inits()))
      TraverseLambdaCapture(LE, C: &std::get<0>(t: C), Init: std::get<1>(t: C));
    return true;
  }

private:
  /// Returns \p S as an AttributedStmt carrying [[fallthrough]], else null.
  static const AttributedStmt *asFallThroughAttr(const Stmt *S) {
    if (const AttributedStmt *AS = dyn_cast_or_null<AttributedStmt>(Val: S)) {
      if (hasSpecificAttr<FallThroughAttr>(container: AS->getAttrs()))
        return AS;
    }
    return nullptr;
  }

  /// The terminator of \p B, or its last statement element, or null.
  static const Stmt *getLastStmt(const CFGBlock &B) {
    if (const Stmt *Term = B.getTerminatorStmt())
      return Term;
    for (const CFGElement &Elem : llvm::reverse(C: B))
      if (std::optional<CFGStmt> CS = Elem.getAs<CFGStmt>())
        return CS->getStmt();
    // Workaround to detect a statement thrown out by CFGBuilder:
    //   case X: {} case Y:
    //   case X: ; case Y:
    if (const SwitchCase *SW = dyn_cast_or_null<SwitchCase>(Val: B.getLabel()))
      if (!isa<SwitchCase>(Val: SW->getSubStmt()))
        return SW->getSubStmt();

    return nullptr;
  }

  bool FoundSwitchStatements;
  AttrStmts FallthroughStmts;
  Sema &S;
  llvm::SmallPtrSet<const CFGBlock *, 16> ReachableBlocks;
};
} // anonymous namespace
1368
1369static StringRef getFallthroughAttrSpelling(Preprocessor &PP,
1370 SourceLocation Loc) {
1371 TokenValue FallthroughTokens[] = {
1372 tok::l_square, tok::l_square,
1373 PP.getIdentifierInfo(Name: "fallthrough"),
1374 tok::r_square, tok::r_square
1375 };
1376
1377 TokenValue ClangFallthroughTokens[] = {
1378 tok::l_square, tok::l_square, PP.getIdentifierInfo(Name: "clang"),
1379 tok::coloncolon, PP.getIdentifierInfo(Name: "fallthrough"),
1380 tok::r_square, tok::r_square
1381 };
1382
1383 bool PreferClangAttr = !PP.getLangOpts().CPlusPlus17 && !PP.getLangOpts().C23;
1384
1385 StringRef MacroName;
1386 if (PreferClangAttr)
1387 MacroName = PP.getLastMacroWithSpelling(Loc, Tokens: ClangFallthroughTokens);
1388 if (MacroName.empty())
1389 MacroName = PP.getLastMacroWithSpelling(Loc, Tokens: FallthroughTokens);
1390 if (MacroName.empty() && !PreferClangAttr)
1391 MacroName = PP.getLastMacroWithSpelling(Loc, Tokens: ClangFallthroughTokens);
1392 if (MacroName.empty()) {
1393 if (!PreferClangAttr)
1394 MacroName = "[[fallthrough]]";
1395 else if (PP.getLangOpts().CPlusPlus)
1396 MacroName = "[[clang::fallthrough]]";
1397 else
1398 MacroName = "__attribute__((fallthrough))";
1399 }
1400 return MacroName;
1401}
1402
/// Emit -Wimplicit-fallthrough diagnostics for the body in \p AC.
/// \param PerFunction when true, only functions that already contain at
///        least one fallthrough annotation are checked (the
///        per-function variant of the warning).
static void DiagnoseSwitchLabelsFallthrough(Sema &S, AnalysisDeclContext &AC,
                                            bool PerFunction) {
  FallthroughMapper FM(S);
  FM.TraverseStmt(S: AC.getBody());

  // No switch statements: nothing to check.
  if (!FM.foundSwitchStatements())
    return;

  if (PerFunction && FM.getFallthroughStmts().empty())
    return;

  CFG *Cfg = AC.getCFG();

  if (!Cfg)
    return;

  FM.fillReachableBlocks(Cfg);

  for (const CFGBlock *B : llvm::reverse(C&: *Cfg)) {
    const Stmt *Label = B->getLabel();

    // Only case/default-labelled blocks can be fallen into.
    if (!isa_and_nonnull<SwitchCase>(Val: Label))
      continue;

    int AnnotatedCnt;

    bool IsTemplateInstantiation = false;
    if (const FunctionDecl *Function = dyn_cast<FunctionDecl>(Val: AC.getDecl()))
      IsTemplateInstantiation = Function->isTemplateInstantiation();
    if (!FM.checkFallThroughIntoBlock(B: *B, AnnotatedCnt,
                                      IsTemplateInstantiation))
      continue;

    S.Diag(Loc: Label->getBeginLoc(),
           DiagID: PerFunction ? diag::warn_unannotated_fallthrough_per_function
                       : diag::warn_unannotated_fallthrough);

    if (!AnnotatedCnt) {
      SourceLocation L = Label->getBeginLoc();
      // Fix-its into macro expansions are unreliable; skip the notes.
      if (L.isMacroID())
        continue;

      const Stmt *Term = B->getTerminatorStmt();
      // Skip empty cases.
      while (B->empty() && !Term && B->succ_size() == 1) {
        B = *B->succ_begin();
        Term = B->getTerminatorStmt();
      }
      // Suggest inserting the attribute unless the case is empty and ends in
      // a break (where only the 'insert break' note makes sense).
      if (!(B->empty() && isa_and_nonnull<BreakStmt>(Val: Term))) {
        Preprocessor &PP = S.getPreprocessor();
        StringRef AnnotationSpelling = getFallthroughAttrSpelling(PP, Loc: L);
        SmallString<64> TextToInsert(AnnotationSpelling);
        TextToInsert += "; ";
        S.Diag(Loc: L, DiagID: diag::note_insert_fallthrough_fixit)
            << AnnotationSpelling
            << FixItHint::CreateInsertion(InsertionLoc: L, Code: TextToInsert);
      }
      S.Diag(Loc: L, DiagID: diag::note_insert_break_fixit)
          << FixItHint::CreateInsertion(InsertionLoc: L, Code: "break; ");
    }
  }

  // Any annotation not matched to an actual fallthrough edge is misplaced.
  for (const auto *F : FM.getFallthroughStmts())
    S.Diag(Loc: F->getBeginLoc(), DiagID: diag::err_fallthrough_attr_invalid_placement);
}
1468
1469static bool isInLoop(const ASTContext &Ctx, const ParentMap &PM,
1470 const Stmt *S) {
1471 assert(S);
1472
1473 do {
1474 switch (S->getStmtClass()) {
1475 case Stmt::ForStmtClass:
1476 case Stmt::WhileStmtClass:
1477 case Stmt::CXXForRangeStmtClass:
1478 case Stmt::ObjCForCollectionStmtClass:
1479 return true;
1480 case Stmt::DoStmtClass: {
1481 Expr::EvalResult Result;
1482 if (!cast<DoStmt>(Val: S)->getCond()->EvaluateAsInt(Result, Ctx))
1483 return true;
1484 return Result.Val.getInt().getBoolValue();
1485 }
1486 default:
1487 break;
1488 }
1489 } while ((S = PM.getParent(S)));
1490
1491 return false;
1492}
1493
/// Emit -Warc-repeated-use-of-weak diagnostics: warn when the same weak
/// object is read multiple times in \p CurFn, since each read may observe a
/// different value (the object can be deallocated between reads).
static void diagnoseRepeatedUseOfWeak(Sema &S,
                                      const sema::FunctionScopeInfo *CurFn,
                                      const Decl *D,
                                      const ParentMap &PM) {
  typedef sema::FunctionScopeInfo::WeakObjectProfileTy WeakObjectProfileTy;
  typedef sema::FunctionScopeInfo::WeakObjectUseMap WeakObjectUseMap;
  typedef sema::FunctionScopeInfo::WeakUseVector WeakUseVector;
  typedef std::pair<const Stmt *, WeakObjectUseMap::const_iterator>
  StmtUsesPair;

  ASTContext &Ctx = S.getASTContext();

  const WeakObjectUseMap &WeakMap = CurFn->getWeakObjectUses();

  // Extract all weak objects that are referenced more than once.
  SmallVector<StmtUsesPair, 8> UsesByStmt;
  for (WeakObjectUseMap::const_iterator I = WeakMap.begin(), E = WeakMap.end();
       I != E; ++I) {
    const WeakUseVector &Uses = I->second;

    // Find the first read of the weak object.
    WeakUseVector::const_iterator UI = Uses.begin(), UE = Uses.end();
    for ( ; UI != UE; ++UI) {
      if (UI->isUnsafe())
        break;
    }

    // If there were only writes to this object, don't warn.
    if (UI == UE)
      continue;

    // If there was only one read, followed by any number of writes, and the
    // read is not within a loop, don't warn. Additionally, don't warn in a
    // loop if the base object is a local variable -- local variables are often
    // changed in loops.
    if (UI == Uses.begin()) {
      WeakUseVector::const_iterator UI2 = UI;
      for (++UI2; UI2 != UE; ++UI2)
        if (UI2->isUnsafe())
          break;

      if (UI2 == UE) {
        // Exactly one read. Loop membership decides whether it can repeat.
        if (!isInLoop(Ctx, PM, S: UI->getUseExpr()))
          continue;

        const WeakObjectProfileTy &Profile = I->first;
        if (!Profile.isExactProfile())
          continue;

        const NamedDecl *Base = Profile.getBase();
        if (!Base)
          Base = Profile.getProperty();
        assert(Base && "A profile always has a base or property.");

        // Skip local non-parameter variables (commonly mutated in loops).
        if (const VarDecl *BaseVar = dyn_cast<VarDecl>(Val: Base))
          if (BaseVar->hasLocalStorage() && !isa<ParmVarDecl>(Val: Base))
            continue;
      }
    }

    UsesByStmt.push_back(Elt: StmtUsesPair(UI->getUseExpr(), I));
  }

  if (UsesByStmt.empty())
    return;

  // Sort by first use so that we emit the warnings in a deterministic order.
  SourceManager &SM = S.getSourceManager();
  llvm::sort(C&: UsesByStmt,
             Comp: [&SM](const StmtUsesPair &LHS, const StmtUsesPair &RHS) {
               return SM.isBeforeInTranslationUnit(LHS: LHS.first->getBeginLoc(),
                                                   RHS: RHS.first->getBeginLoc());
             });

  // Classify the current code body for better warning text.
  // This enum should stay in sync with the cases in
  // warn_arc_repeated_use_of_weak and warn_arc_possible_repeated_use_of_weak.
  // FIXME: Should we use a common classification enum and the same set of
  // possibilities all throughout Sema?
  enum {
    Function,
    Method,
    Block,
    Lambda
  } FunctionKind;

  if (isa<sema::BlockScopeInfo>(Val: CurFn))
    FunctionKind = Block;
  else if (isa<sema::LambdaScopeInfo>(Val: CurFn))
    FunctionKind = Lambda;
  else if (isa<ObjCMethodDecl>(Val: D))
    FunctionKind = Method;
  else
    FunctionKind = Function;

  // Iterate through the sorted problems and emit warnings for each.
  for (const auto &P : UsesByStmt) {
    const Stmt *FirstRead = P.first;
    const WeakObjectProfileTy &Key = P.second->first;
    const WeakUseVector &Uses = P.second->second;

    // For complicated expressions like 'a.b.c' and 'x.b.c', WeakObjectProfileTy
    // may not contain enough information to determine that these are different
    // properties. We can only be 100% sure of a repeated use in certain cases,
    // and we adjust the diagnostic kind accordingly so that the less certain
    // case can be turned off if it is too noisy.
    unsigned DiagKind;
    if (Key.isExactProfile())
      DiagKind = diag::warn_arc_repeated_use_of_weak;
    else
      DiagKind = diag::warn_arc_possible_repeated_use_of_weak;

    // Classify the weak object being accessed for better warning text.
    // This enum should stay in sync with the cases in
    // warn_arc_repeated_use_of_weak and warn_arc_possible_repeated_use_of_weak.
    enum {
      Variable,
      Property,
      ImplicitProperty,
      Ivar
    } ObjectKind;

    const NamedDecl *KeyProp = Key.getProperty();
    if (isa<VarDecl>(Val: KeyProp))
      ObjectKind = Variable;
    else if (isa<ObjCPropertyDecl>(Val: KeyProp))
      ObjectKind = Property;
    else if (isa<ObjCMethodDecl>(Val: KeyProp))
      ObjectKind = ImplicitProperty;
    else if (isa<ObjCIvarDecl>(Val: KeyProp))
      ObjectKind = Ivar;
    else
      llvm_unreachable("Unexpected weak object kind!");

    // Do not warn about IBOutlet weak property receivers being set to null
    // since they are typically only used from the main thread.
    if (const ObjCPropertyDecl *Prop = dyn_cast<ObjCPropertyDecl>(Val: KeyProp))
      if (Prop->hasAttr<IBOutletAttr>())
        continue;

    // Show the first time the object was read.
    S.Diag(Loc: FirstRead->getBeginLoc(), DiagID: DiagKind)
        << int(ObjectKind) << KeyProp << int(FunctionKind)
        << FirstRead->getSourceRange();

    // Print all the other accesses as notes.
    for (const auto &Use : Uses) {
      if (Use.getUseExpr() == FirstRead)
        continue;
      S.Diag(Loc: Use.getUseExpr()->getBeginLoc(),
             DiagID: diag::note_arc_weak_also_accessed_here)
          << Use.getUseExpr()->getSourceRange();
    }
  }
}
1649
1650namespace clang {
1651namespace {
1652typedef SmallVector<PartialDiagnosticAt, 1> OptionalNotes;
1653typedef std::pair<PartialDiagnosticAt, OptionalNotes> DelayedDiag;
1654typedef std::list<DelayedDiag> DiagList;
1655
1656struct SortDiagBySourceLocation {
1657 SourceManager &SM;
1658 SortDiagBySourceLocation(SourceManager &SM) : SM(SM) {}
1659
1660 bool operator()(const DelayedDiag &left, const DelayedDiag &right) {
1661 // Although this call will be slow, this is only called when outputting
1662 // multiple warnings.
1663 return SM.isBeforeInTranslationUnit(LHS: left.first.first, RHS: right.first.first);
1664 }
1665};
1666} // anonymous namespace
1667} // namespace clang
1668
1669namespace {
/// Receives uninitialized-variable findings from the CFG-based analysis and
/// turns them into -Wuninitialized family diagnostics. Diagnostics are
/// buffered per variable and flushed (in insertion order) on destruction.
class UninitValsDiagReporter : public UninitVariablesHandler {
  Sema &S;
  typedef SmallVector<UninitUse, 2> UsesVec;
  // Pointer: owned vector of uses; Int: 'has idiomatic self-init' flag.
  typedef llvm::PointerIntPair<UsesVec *, 1, bool> MappedType;
  // Prefer using MapVector to DenseMap, so that iteration order will be
  // the same as insertion order. This is needed to obtain a deterministic
  // order of diagnostics when calling flushDiagnostics().
  typedef llvm::MapVector<const VarDecl *, MappedType> UsesMap;
  UsesMap uses;

public:
  UninitValsDiagReporter(Sema &S) : S(S) {}
  ~UninitValsDiagReporter() override { flushDiagnostics(); }

  /// Returns the (lazily created) use record for \p vd.
  MappedType &getUses(const VarDecl *vd) {
    MappedType &V = uses[vd];
    if (!V.getPointer())
      V.setPointer(new UsesVec());
    return V;
  }

  void handleUseOfUninitVariable(const VarDecl *vd,
                                 const UninitUse &use) override {
    getUses(vd).getPointer()->push_back(Elt: use);
  }

  void handleSelfInit(const VarDecl *vd) override { getUses(vd).setInt(true); }

  /// Emit all buffered diagnostics and release the per-variable storage.
  void flushDiagnostics() {
    for (const auto &P : uses) {
      const VarDecl *vd = P.first;
      const MappedType &V = P.second;

      UsesVec *vec = V.getPointer();
      bool hasSelfInit = V.getInt();

      diagnoseUnitializedVar(vd, hasSelfInit, vec);

      // Release the uses vector.
      delete vec;
    }

    uses.clear();
  }

private:
  /// True if any use is known to be uninitialized on every path.
  static bool hasAlwaysUninitializedUse(const UsesVec* vec) {
    return llvm::any_of(Range: *vec, P: [](const UninitUse &U) {
      return U.getKind() == UninitUse::Always ||
             U.getKind() == UninitUse::AfterCall ||
             U.getKind() == UninitUse::AfterDecl;
    });
  }

  // Print the diagnostic for the variable. We try to warn only on the first
  // point at which a variable is used uninitialized. After the first
  // diagnostic is printed, further diagnostics for this variable are skipped.
  void diagnoseUnitializedVar(const VarDecl *vd, bool hasSelfInit,
                              UsesVec *vec) {
    // Specially handle the case where we have uses of an uninitialized
    // variable, but the root cause is an idiomatic self-init. We want
    // to report the diagnostic at the self-init since that is the root cause.
    if (hasSelfInit && hasAlwaysUninitializedUse(vec)) {
      if (DiagnoseUninitializedUse(S, VD: vd,
                                   Use: UninitUse(vd->getInit()->IgnoreParenCasts(),
                                             /*isAlwaysUninit=*/true),
                                   /*alwaysReportSelfInit=*/true))
        return;
    }

    // Sort the uses by their SourceLocations. While not strictly
    // guaranteed to produce them in line/column order, this will provide
    // a stable ordering.
    llvm::sort(C&: *vec, Comp: [](const UninitUse &a, const UninitUse &b) {
      // Prefer the direct use of an uninitialized variable over its use via
      // constant reference or pointer.
      if (a.isConstRefOrPtrUse() != b.isConstRefOrPtrUse())
        return b.isConstRefOrPtrUse();
      // Prefer a more confident report over a less confident one.
      if (a.getKind() != b.getKind())
        return a.getKind() > b.getKind();
      return a.getUser()->getBeginLoc() < b.getUser()->getBeginLoc();
    });

    // Stop after the first use that actually produces a warning.
    for (const auto &U : *vec) {
      if (U.isConstRefUse()) {
        if (DiagnoseUninitializedConstRefUse(S, VD: vd, Use: U))
          return;
      } else if (U.isConstPtrUse()) {
        if (DiagnoseUninitializedConstPtrUse(S, VD: vd, Use: U))
          return;
      } else {
        // If we have self-init, downgrade all uses to 'may be uninitialized'.
        UninitUse Use = hasSelfInit ? UninitUse(U.getUser(), false) : U;
        if (DiagnoseUninitializedUse(S, VD: vd, Use))
          return;
      }
    }
  }
};
1770
1771/// Inter-procedural data for the called-once checker.
1772class CalledOnceInterProceduralData {
1773public:
1774 // Add the delayed warning for the given block.
1775 void addDelayedWarning(const BlockDecl *Block,
1776 PartialDiagnosticAt &&Warning) {
1777 DelayedBlockWarnings[Block].emplace_back(Args: std::move(Warning));
1778 }
1779 // Report all of the warnings we've gathered for the given block.
1780 void flushWarnings(const BlockDecl *Block, Sema &S) {
1781 for (const PartialDiagnosticAt &Delayed : DelayedBlockWarnings[Block])
1782 S.Diag(Loc: Delayed.first, PD: Delayed.second);
1783
1784 discardWarnings(Block);
1785 }
1786 // Discard all of the warnings we've gathered for the given block.
1787 void discardWarnings(const BlockDecl *Block) {
1788 DelayedBlockWarnings.erase(Val: Block);
1789 }
1790
1791private:
1792 using DelayedDiagnostics = SmallVector<PartialDiagnosticAt, 2>;
1793 llvm::DenseMap<const BlockDecl *, DelayedDiagnostics> DelayedBlockWarnings;
1794};
1795
1796class CalledOnceCheckReporter : public CalledOnceCheckHandler {
1797public:
1798 CalledOnceCheckReporter(Sema &S, CalledOnceInterProceduralData &Data)
1799 : S(S), Data(Data) {}
1800 void handleDoubleCall(const ParmVarDecl *Parameter, const Expr *Call,
1801 const Expr *PrevCall, bool IsCompletionHandler,
1802 bool Poised) override {
1803 auto DiagToReport = IsCompletionHandler
1804 ? diag::warn_completion_handler_called_twice
1805 : diag::warn_called_once_gets_called_twice;
1806 S.Diag(Loc: Call->getBeginLoc(), DiagID: DiagToReport) << Parameter;
1807 S.Diag(Loc: PrevCall->getBeginLoc(), DiagID: diag::note_called_once_gets_called_twice)
1808 << Poised;
1809 }
1810
1811 void handleNeverCalled(const ParmVarDecl *Parameter,
1812 bool IsCompletionHandler) override {
1813 auto DiagToReport = IsCompletionHandler
1814 ? diag::warn_completion_handler_never_called
1815 : diag::warn_called_once_never_called;
1816 S.Diag(Loc: Parameter->getBeginLoc(), DiagID: DiagToReport)
1817 << Parameter << /* Captured */ false;
1818 }
1819
1820 void handleNeverCalled(const ParmVarDecl *Parameter, const Decl *Function,
1821 const Stmt *Where, NeverCalledReason Reason,
1822 bool IsCalledDirectly,
1823 bool IsCompletionHandler) override {
1824 auto DiagToReport = IsCompletionHandler
1825 ? diag::warn_completion_handler_never_called_when
1826 : diag::warn_called_once_never_called_when;
1827 PartialDiagnosticAt Warning(Where->getBeginLoc(), S.PDiag(DiagID: DiagToReport)
1828 << Parameter
1829 << IsCalledDirectly
1830 << (unsigned)Reason);
1831
1832 if (const auto *Block = dyn_cast<BlockDecl>(Val: Function)) {
1833 // We shouldn't report these warnings on blocks immediately
1834 Data.addDelayedWarning(Block, Warning: std::move(Warning));
1835 } else {
1836 S.Diag(Loc: Warning.first, PD: Warning.second);
1837 }
1838 }
1839
1840 void handleCapturedNeverCalled(const ParmVarDecl *Parameter,
1841 const Decl *Where,
1842 bool IsCompletionHandler) override {
1843 auto DiagToReport = IsCompletionHandler
1844 ? diag::warn_completion_handler_never_called
1845 : diag::warn_called_once_never_called;
1846 S.Diag(Loc: Where->getBeginLoc(), DiagID: DiagToReport)
1847 << Parameter << /* Captured */ true;
1848 }
1849
1850 void
1851 handleBlockThatIsGuaranteedToBeCalledOnce(const BlockDecl *Block) override {
1852 Data.flushWarnings(Block, S);
1853 }
1854
1855 void handleBlockWithNoGuarantees(const BlockDecl *Block) override {
1856 Data.discardWarnings(Block);
1857 }
1858
1859private:
1860 Sema &S;
1861 CalledOnceInterProceduralData &Data;
1862};
1863
// Diagnostic IDs checked to decide whether the called-once analysis is worth
// running for explicitly annotated ('called_once') parameters.
constexpr unsigned CalledOnceWarnings[] = {
    diag::warn_called_once_never_called,
    diag::warn_called_once_never_called_when,
    diag::warn_called_once_gets_called_twice};

// Diagnostic IDs checked to decide whether convention-based (completion
// handler) called-once checking should run.
constexpr unsigned CompletionHandlerWarnings[]{
    diag::warn_completion_handler_never_called,
    diag::warn_completion_handler_never_called_when,
    diag::warn_completion_handler_called_twice};
1873
1874bool shouldAnalyzeCalledOnceImpl(llvm::ArrayRef<unsigned> DiagIDs,
1875 const DiagnosticsEngine &Diags,
1876 SourceLocation At) {
1877 return llvm::any_of(Range&: DiagIDs, P: [&Diags, At](unsigned DiagID) {
1878 return !Diags.isIgnored(DiagID, Loc: At);
1879 });
1880}
1881
1882bool shouldAnalyzeCalledOnceConventions(const DiagnosticsEngine &Diags,
1883 SourceLocation At) {
1884 return shouldAnalyzeCalledOnceImpl(DiagIDs: CompletionHandlerWarnings, Diags, At);
1885}
1886
1887bool shouldAnalyzeCalledOnceParameters(const DiagnosticsEngine &Diags,
1888 SourceLocation At) {
1889 return shouldAnalyzeCalledOnceImpl(DiagIDs: CalledOnceWarnings, Diags, At) ||
1890 shouldAnalyzeCalledOnceConventions(Diags, At);
1891}
1892} // anonymous namespace
1893
1894//===----------------------------------------------------------------------===//
1895// -Wthread-safety
1896//===----------------------------------------------------------------------===//
1897namespace clang {
1898namespace threadSafety {
1899namespace {
/// Collects all -Wthread-safety diagnostics produced while analyzing one
/// function and emits them, sorted by source location, when
/// emitDiagnostics() is called.
class ThreadSafetyReporter : public clang::threadSafety::ThreadSafetyHandler {
  Sema &S;
  // Buffered (warning, notes) pairs; sorted before emission.
  DiagList Warnings;
  // Fallback locations (function begin/end) used when a callback hands us an
  // invalid location.
  SourceLocation FunLocation, FunEndLocation;

  // Function currently under analysis; set by enterFunction(), cleared by
  // leaveFunction().
  const FunctionDecl *CurrentFunction;
  // When true, every warning also carries a note pointing at CurrentFunction.
  bool Verbose;

  // Notes for a warning with no notes of its own: empty, or just the verbose
  // "warning occurred in function" note.
  OptionalNotes getNotes() const {
    if (Verbose && CurrentFunction) {
      PartialDiagnosticAt FNote(CurrentFunction->getBody()->getBeginLoc(),
                                S.PDiag(DiagID: diag::note_thread_warning_in_fun)
                                    << CurrentFunction);
      return OptionalNotes(1, FNote);
    }
    return OptionalNotes();
  }

  // Wraps one note, appending the verbose function note when enabled.
  OptionalNotes getNotes(const PartialDiagnosticAt &Note) const {
    OptionalNotes ONS(1, Note);
    if (Verbose && CurrentFunction) {
      PartialDiagnosticAt FNote(CurrentFunction->getBody()->getBeginLoc(),
                                S.PDiag(DiagID: diag::note_thread_warning_in_fun)
                                    << CurrentFunction);
      ONS.push_back(Elt: std::move(FNote));
    }
    return ONS;
  }

  // Wraps two notes, appending the verbose function note when enabled.
  OptionalNotes getNotes(const PartialDiagnosticAt &Note1,
                         const PartialDiagnosticAt &Note2) const {
    OptionalNotes ONS;
    ONS.push_back(Elt: Note1);
    ONS.push_back(Elt: Note2);
    if (Verbose && CurrentFunction) {
      PartialDiagnosticAt FNote(CurrentFunction->getBody()->getBeginLoc(),
                                S.PDiag(DiagID: diag::note_thread_warning_in_fun)
                                    << CurrentFunction);
      ONS.push_back(Elt: std::move(FNote));
    }
    return ONS;
  }

  // "Lock acquired here" note, or plain notes if LocLocked is invalid.
  OptionalNotes makeLockedHereNote(SourceLocation LocLocked, StringRef Kind) {
    return LocLocked.isValid()
               ? getNotes(Note: PartialDiagnosticAt(
                     LocLocked, S.PDiag(DiagID: diag::note_locked_here) << Kind))
               : getNotes();
  }

  // "Lock released here" note, or plain notes if LocUnlocked is invalid.
  OptionalNotes makeUnlockedHereNote(SourceLocation LocUnlocked,
                                     StringRef Kind) {
    return LocUnlocked.isValid()
               ? getNotes(Note: PartialDiagnosticAt(
                     LocUnlocked, S.PDiag(DiagID: diag::note_unlocked_here) << Kind))
               : getNotes();
  }

  // Note pointing at the parameter declaration involved in a managed-mutex
  // mismatch, or plain notes if DeclLoc is invalid.
  OptionalNotes makeManagedMismatchNoteForParam(SourceLocation DeclLoc) {
    return DeclLoc.isValid()
               ? getNotes(Note: PartialDiagnosticAt(
                     DeclLoc,
                     S.PDiag(DiagID: diag::note_managed_mismatch_here_for_param)))
               : getNotes();
  }

 public:
  ThreadSafetyReporter(Sema &S, SourceLocation FL, SourceLocation FEL)
      : S(S), FunLocation(FL), FunEndLocation(FEL),
        CurrentFunction(nullptr), Verbose(false) {}

  void setVerbose(bool b) { Verbose = b; }

  /// Emit all buffered diagnostics in order of sourcelocation.
  /// We need to output diagnostics produced while iterating through
  /// the lockset in deterministic order, so this function orders diagnostics
  /// and outputs them.
  void emitDiagnostics() {
    Warnings.sort(comp: SortDiagBySourceLocation(S.getSourceManager()));
    for (const auto &Diag : Warnings) {
      S.Diag(Loc: Diag.first.first, PD: Diag.first.second);
      for (const auto &Note : Diag.second)
        S.Diag(Loc: Note.first, PD: Note.second);
    }
  }

  // Queues warn_unmatched_underlying_mutexes with a note at the parameter
  // declaration (DLoc).
  void handleUnmatchedUnderlyingMutexes(SourceLocation Loc, SourceLocation DLoc,
                                        Name scopeName, StringRef Kind,
                                        Name expected, Name actual) override {
    PartialDiagnosticAt Warning(Loc,
                                S.PDiag(DiagID: diag::warn_unmatched_underlying_mutexes)
                                    << Kind << scopeName << expected << actual);
    Warnings.emplace_back(args: std::move(Warning),
                          args: makeManagedMismatchNoteForParam(DeclLoc: DLoc));
  }

  // Queues warn_expect_more_underlying_mutexes.
  void handleExpectMoreUnderlyingMutexes(SourceLocation Loc,
                                         SourceLocation DLoc, Name scopeName,
                                         StringRef Kind,
                                         Name expected) override {
    PartialDiagnosticAt Warning(
        Loc, S.PDiag(DiagID: diag::warn_expect_more_underlying_mutexes)
                 << Kind << scopeName << expected);
    Warnings.emplace_back(args: std::move(Warning),
                          args: makeManagedMismatchNoteForParam(DeclLoc: DLoc));
  }

  // Queues warn_expect_fewer_underlying_mutexes.
  void handleExpectFewerUnderlyingMutexes(SourceLocation Loc,
                                          SourceLocation DLoc, Name scopeName,
                                          StringRef Kind,
                                          Name actual) override {
    PartialDiagnosticAt Warning(
        Loc, S.PDiag(DiagID: diag::warn_expect_fewer_underlying_mutexes)
                 << Kind << scopeName << actual);
    Warnings.emplace_back(args: std::move(Warning),
                          args: makeManagedMismatchNoteForParam(DeclLoc: DLoc));
  }

  // Queues warn_cannot_resolve_lock at Loc.
  void handleInvalidLockExp(SourceLocation Loc) override {
    PartialDiagnosticAt Warning(Loc, S.PDiag(DiagID: diag::warn_cannot_resolve_lock)
                                         << Loc);
    Warnings.emplace_back(args: std::move(Warning), args: getNotes());
  }

  // Queues warn_unlock_but_no_lock; falls back to the function location when
  // Loc is invalid.
  void handleUnmatchedUnlock(StringRef Kind, Name LockName, SourceLocation Loc,
                             SourceLocation LocPreviousUnlock) override {
    if (Loc.isInvalid())
      Loc = FunLocation;
    PartialDiagnosticAt Warning(Loc, S.PDiag(DiagID: diag::warn_unlock_but_no_lock)
                                         << Kind << LockName);
    Warnings.emplace_back(args: std::move(Warning),
                          args: makeUnlockedHereNote(LocUnlocked: LocPreviousUnlock, Kind));
  }

  // Queues warn_unlock_kind_mismatch (e.g. shared release of an exclusively
  // held capability).
  void handleIncorrectUnlockKind(StringRef Kind, Name LockName,
                                 LockKind Expected, LockKind Received,
                                 SourceLocation LocLocked,
                                 SourceLocation LocUnlock) override {
    if (LocUnlock.isInvalid())
      LocUnlock = FunLocation;
    PartialDiagnosticAt Warning(
        LocUnlock, S.PDiag(DiagID: diag::warn_unlock_kind_mismatch)
                       << Kind << LockName << Received << Expected);
    Warnings.emplace_back(args: std::move(Warning),
                          args: makeLockedHereNote(LocLocked, Kind));
  }

  // Queues warn_double_lock.
  void handleDoubleLock(StringRef Kind, Name LockName, SourceLocation LocLocked,
                        SourceLocation LocDoubleLock) override {
    if (LocDoubleLock.isInvalid())
      LocDoubleLock = FunLocation;
    PartialDiagnosticAt Warning(LocDoubleLock, S.PDiag(DiagID: diag::warn_double_lock)
                                                   << Kind << LockName);
    Warnings.emplace_back(args: std::move(Warning),
                          args: makeLockedHereNote(LocLocked, Kind));
  }

  // Queues the held-at-end-of-scope warning selected by LEK.
  void handleMutexHeldEndOfScope(StringRef Kind, Name LockName,
                                 SourceLocation LocLocked,
                                 SourceLocation LocEndOfScope,
                                 LockErrorKind LEK,
                                 bool ReentrancyMismatch) override {
    unsigned DiagID = 0;
    switch (LEK) {
    case LEK_LockedSomePredecessors:
      DiagID = diag::warn_lock_some_predecessors;
      break;
    case LEK_LockedSomeLoopIterations:
      DiagID = diag::warn_expecting_lock_held_on_loop;
      break;
    case LEK_LockedAtEndOfFunction:
      DiagID = diag::warn_no_unlock;
      break;
    case LEK_NotLockedAtEndOfFunction:
      DiagID = diag::warn_expecting_locked;
      break;
    }
    if (LocEndOfScope.isInvalid())
      LocEndOfScope = FunEndLocation;

    PartialDiagnosticAt Warning(LocEndOfScope, S.PDiag(DiagID)
                                                   << Kind << LockName
                                                   << ReentrancyMismatch);
    Warnings.emplace_back(args: std::move(Warning),
                          args: makeLockedHereNote(LocLocked, Kind));
  }

  // Queues warn_lock_exclusive_and_shared with a note at the other
  // acquisition site.
  void handleExclusiveAndShared(StringRef Kind, Name LockName,
                                SourceLocation Loc1,
                                SourceLocation Loc2) override {
    PartialDiagnosticAt Warning(Loc1,
                                S.PDiag(DiagID: diag::warn_lock_exclusive_and_shared)
                                    << Kind << LockName);
    PartialDiagnosticAt Note(Loc2, S.PDiag(DiagID: diag::note_lock_exclusive_and_shared)
                                       << Kind << LockName);
    Warnings.emplace_back(args: std::move(Warning), args: getNotes(Note));
  }

  // Queues warn_variable_requires_any_lock / warn_var_deref_requires_any_lock
  // for accesses that require holding *some* capability.
  void handleNoMutexHeld(const NamedDecl *D, ProtectedOperationKind POK,
                         AccessKind AK, SourceLocation Loc) override {
    unsigned DiagID = 0;
    switch (POK) {
    case POK_VarAccess:
    case POK_PassByRef:
    case POK_ReturnByRef:
    case POK_PassPointer:
    case POK_ReturnPointer:
      DiagID = diag::warn_variable_requires_any_lock;
      break;
    case POK_VarDereference:
    case POK_PtPassByRef:
    case POK_PtReturnByRef:
    case POK_PtPassPointer:
    case POK_PtReturnPointer:
      DiagID = diag::warn_var_deref_requires_any_lock;
      break;
    case POK_FunctionCall:
      llvm_unreachable("Only works for variables");
      break;
    }
    PartialDiagnosticAt Warning(Loc, S.PDiag(DiagID)
                                         << D << getLockKindFromAccessKind(AK));
    Warnings.emplace_back(args: std::move(Warning), args: getNotes());
  }

  // Queues the appropriate "requires lock" warning for an access to D without
  // holding LockName. When PossibleMatch is non-null, a near-match capability
  // was held, so the "precise" diagnostic variants plus a near-match note are
  // used instead.
  void handleMutexNotHeld(StringRef Kind, const NamedDecl *D,
                          ProtectedOperationKind POK, Name LockName,
                          LockKind LK, SourceLocation Loc,
                          Name *PossibleMatch) override {
    unsigned DiagID = 0;
    if (PossibleMatch) {
      switch (POK) {
      case POK_VarAccess:
        DiagID = diag::warn_variable_requires_lock_precise;
        break;
      case POK_VarDereference:
        DiagID = diag::warn_var_deref_requires_lock_precise;
        break;
      case POK_FunctionCall:
        DiagID = diag::warn_fun_requires_lock_precise;
        break;
      case POK_PassByRef:
        DiagID = diag::warn_guarded_pass_by_reference;
        break;
      case POK_PtPassByRef:
        DiagID = diag::warn_pt_guarded_pass_by_reference;
        break;
      case POK_ReturnByRef:
        DiagID = diag::warn_guarded_return_by_reference;
        break;
      case POK_PtReturnByRef:
        DiagID = diag::warn_pt_guarded_return_by_reference;
        break;
      case POK_PassPointer:
        DiagID = diag::warn_guarded_pass_pointer;
        break;
      case POK_PtPassPointer:
        DiagID = diag::warn_pt_guarded_pass_pointer;
        break;
      case POK_ReturnPointer:
        DiagID = diag::warn_guarded_return_pointer;
        break;
      case POK_PtReturnPointer:
        DiagID = diag::warn_pt_guarded_return_pointer;
        break;
      }
      PartialDiagnosticAt Warning(Loc, S.PDiag(DiagID) << Kind
                                                       << D
                                                       << LockName << LK);
      PartialDiagnosticAt Note(Loc, S.PDiag(DiagID: diag::note_found_mutex_near_match)
                                        << *PossibleMatch);
      if (Verbose && POK == POK_VarAccess) {
        PartialDiagnosticAt VNote(D->getLocation(),
                                  S.PDiag(DiagID: diag::note_guarded_by_declared_here)
                                      << D->getDeclName());
        Warnings.emplace_back(args: std::move(Warning), args: getNotes(Note1: Note, Note2: VNote));
      } else
        Warnings.emplace_back(args: std::move(Warning), args: getNotes(Note));
    } else {
      switch (POK) {
      case POK_VarAccess:
        DiagID = diag::warn_variable_requires_lock;
        break;
      case POK_VarDereference:
        DiagID = diag::warn_var_deref_requires_lock;
        break;
      case POK_FunctionCall:
        DiagID = diag::warn_fun_requires_lock;
        break;
      case POK_PassByRef:
        DiagID = diag::warn_guarded_pass_by_reference;
        break;
      case POK_PtPassByRef:
        DiagID = diag::warn_pt_guarded_pass_by_reference;
        break;
      case POK_ReturnByRef:
        DiagID = diag::warn_guarded_return_by_reference;
        break;
      case POK_PtReturnByRef:
        DiagID = diag::warn_pt_guarded_return_by_reference;
        break;
      case POK_PassPointer:
        DiagID = diag::warn_guarded_pass_pointer;
        break;
      case POK_PtPassPointer:
        DiagID = diag::warn_pt_guarded_pass_pointer;
        break;
      case POK_ReturnPointer:
        DiagID = diag::warn_guarded_return_pointer;
        break;
      case POK_PtReturnPointer:
        DiagID = diag::warn_pt_guarded_return_pointer;
        break;
      }
      PartialDiagnosticAt Warning(Loc, S.PDiag(DiagID) << Kind
                                                       << D
                                                       << LockName << LK);
      if (Verbose && POK == POK_VarAccess) {
        PartialDiagnosticAt Note(D->getLocation(),
                                 S.PDiag(DiagID: diag::note_guarded_by_declared_here));
        Warnings.emplace_back(args: std::move(Warning), args: getNotes(Note));
      } else
        Warnings.emplace_back(args: std::move(Warning), args: getNotes());
    }
  }

  // Queues warn_acquire_requires_negative_cap.
  void handleNegativeNotHeld(StringRef Kind, Name LockName, Name Neg,
                             SourceLocation Loc) override {
    PartialDiagnosticAt Warning(Loc,
                                S.PDiag(DiagID: diag::warn_acquire_requires_negative_cap)
                                    << Kind << LockName << Neg);
    Warnings.emplace_back(args: std::move(Warning), args: getNotes());
  }

  // Queues warn_fun_requires_negative_cap.
  void handleNegativeNotHeld(const NamedDecl *D, Name LockName,
                             SourceLocation Loc) override {
    PartialDiagnosticAt Warning(
        Loc, S.PDiag(DiagID: diag::warn_fun_requires_negative_cap) << D << LockName);
    Warnings.emplace_back(args: std::move(Warning), args: getNotes());
  }

  // Queues warn_fun_excludes_mutex.
  void handleFunExcludesLock(StringRef Kind, Name FunName, Name LockName,
                             SourceLocation Loc) override {
    PartialDiagnosticAt Warning(Loc, S.PDiag(DiagID: diag::warn_fun_excludes_mutex)
                                         << Kind << FunName << LockName);
    Warnings.emplace_back(args: std::move(Warning), args: getNotes());
  }

  // Queues warn_acquired_before (acquired-before ordering violated).
  void handleLockAcquiredBefore(StringRef Kind, Name L1Name, Name L2Name,
                                SourceLocation Loc) override {
    PartialDiagnosticAt Warning(Loc,
      S.PDiag(DiagID: diag::warn_acquired_before) << Kind << L1Name << L2Name);
    Warnings.emplace_back(args: std::move(Warning), args: getNotes());
  }

  // Queues warn_acquired_before_after_cycle.
  void handleBeforeAfterCycle(Name L1Name, SourceLocation Loc) override {
    PartialDiagnosticAt Warning(Loc,
      S.PDiag(DiagID: diag::warn_acquired_before_after_cycle) << L1Name);
    Warnings.emplace_back(args: std::move(Warning), args: getNotes());
  }

  // Track the function being analyzed so verbose notes can reference it.
  void enterFunction(const FunctionDecl* FD) override {
    CurrentFunction = FD;
  }

  void leaveFunction(const FunctionDecl* FD) override {
    CurrentFunction = nullptr;
  }
};
2269} // anonymous namespace
2270} // namespace threadSafety
2271} // namespace clang
2272
2273//===----------------------------------------------------------------------===//
2274// -Wconsumed
2275//===----------------------------------------------------------------------===//
2276
2277namespace clang {
2278namespace consumed {
2279namespace {
/// Collects -Wconsumed diagnostics during the consumed-objects analysis and
/// emits them, sorted by source location, via emitDiagnostics().
class ConsumedWarningsHandler : public ConsumedWarningsHandlerBase {

  Sema &S;
  // Buffered (warning, notes) pairs; sorted before emission.
  DiagList Warnings;

public:

  ConsumedWarningsHandler(Sema &S) : S(S) {}

  // Emit all buffered diagnostics in deterministic source-location order.
  void emitDiagnostics() override {
    Warnings.sort(comp: SortDiagBySourceLocation(S.getSourceManager()));
    for (const auto &Diag : Warnings) {
      S.Diag(Loc: Diag.first.first, PD: Diag.first.second);
      for (const auto &Note : Diag.second)
        S.Diag(Loc: Note.first, PD: Note.second);
    }
  }

  // Queues warn_loop_state_mismatch for the named variable.
  void warnLoopStateMismatch(SourceLocation Loc,
                             StringRef VariableName) override {
    PartialDiagnosticAt Warning(Loc, S.PDiag(DiagID: diag::warn_loop_state_mismatch) <<
      VariableName);

    Warnings.emplace_back(args: std::move(Warning), args: OptionalNotes());
  }

  // Queues warn_param_return_typestate_mismatch.
  void warnParamReturnTypestateMismatch(SourceLocation Loc,
                                        StringRef VariableName,
                                        StringRef ExpectedState,
                                        StringRef ObservedState) override {

    PartialDiagnosticAt Warning(Loc, S.PDiag(
      DiagID: diag::warn_param_return_typestate_mismatch) << VariableName <<
      ExpectedState << ObservedState);

    Warnings.emplace_back(args: std::move(Warning), args: OptionalNotes());
  }

  // Queues warn_param_typestate_mismatch.
  void warnParamTypestateMismatch(SourceLocation Loc, StringRef ExpectedState,
                                  StringRef ObservedState) override {

    PartialDiagnosticAt Warning(Loc, S.PDiag(
      DiagID: diag::warn_param_typestate_mismatch) << ExpectedState << ObservedState);

    Warnings.emplace_back(args: std::move(Warning), args: OptionalNotes());
  }

  // Queues warn_return_typestate_for_unconsumable_type.
  void warnReturnTypestateForUnconsumableType(SourceLocation Loc,
                                              StringRef TypeName) override {
    PartialDiagnosticAt Warning(Loc, S.PDiag(
      DiagID: diag::warn_return_typestate_for_unconsumable_type) << TypeName);

    Warnings.emplace_back(args: std::move(Warning), args: OptionalNotes());
  }

  // Queues warn_return_typestate_mismatch.
  void warnReturnTypestateMismatch(SourceLocation Loc, StringRef ExpectedState,
                                   StringRef ObservedState) override {

    PartialDiagnosticAt Warning(Loc, S.PDiag(
      DiagID: diag::warn_return_typestate_mismatch) << ExpectedState << ObservedState);

    Warnings.emplace_back(args: std::move(Warning), args: OptionalNotes());
  }

  // Queues warn_use_of_temp_in_invalid_state.
  void warnUseOfTempInInvalidState(StringRef MethodName, StringRef State,
                                   SourceLocation Loc) override {

    PartialDiagnosticAt Warning(Loc, S.PDiag(
      DiagID: diag::warn_use_of_temp_in_invalid_state) << MethodName << State);

    Warnings.emplace_back(args: std::move(Warning), args: OptionalNotes());
  }

  // Queues warn_use_in_invalid_state.
  void warnUseInInvalidState(StringRef MethodName, StringRef VariableName,
                             StringRef State, SourceLocation Loc) override {

    PartialDiagnosticAt Warning(Loc, S.PDiag(DiagID: diag::warn_use_in_invalid_state) <<
                                MethodName << VariableName << State);

    Warnings.emplace_back(args: std::move(Warning), args: OptionalNotes());
  }
};
2362} // anonymous namespace
2363} // namespace consumed
2364} // namespace clang
2365
2366//===----------------------------------------------------------------------===//
2367// Unsafe buffer usage analysis.
2368//===----------------------------------------------------------------------===//
2369
2370namespace {
/// Translates findings from the unsafe-buffer-usage analysis into
/// -Wunsafe-buffer-usage warnings, notes, and fix-its emitted through Sema.
class UnsafeBufferUsageReporter : public UnsafeBufferUsageHandler {
  Sema &S;
  bool SuggestSuggestions;  // Recommend -fsafe-buffer-usage-suggestions?

  // Lists as a string the names of variables in `VarGroupForVD` except for `VD`
  // itself:
  std::string listVariableGroupAsString(
      const VarDecl *VD, const ArrayRef<const VarDecl *> &VarGroupForVD) const {
    if (VarGroupForVD.size() <= 1)
      return "";

    std::vector<StringRef> VarNames;
    auto PutInQuotes = [](StringRef S) -> std::string {
      return "'" + S.str() + "'";
    };

    for (auto *V : VarGroupForVD) {
      if (V == VD)
        continue;
      VarNames.push_back(x: V->getName());
    }
    if (VarNames.size() == 1) {
      return PutInQuotes(VarNames[0]);
    }
    if (VarNames.size() == 2) {
      return PutInQuotes(VarNames[0]) + " and " + PutInQuotes(VarNames[1]);
    }
    assert(VarGroupForVD.size() > 3);
    const unsigned N = VarNames.size() -
                       2; // need to print the last two names as "..., X, and Y"
    std::string AllVars = "";

    for (unsigned I = 0; I < N; ++I)
      AllVars.append(str: PutInQuotes(VarNames[I]) + ", ");
    AllVars.append(str: PutInQuotes(VarNames[N]) + ", and " +
                   PutInQuotes(VarNames[N + 1]));
    return AllVars;
  }

public:
  UnsafeBufferUsageReporter(Sema &S, bool SuggestSuggestions)
      : S(S), SuggestSuggestions(SuggestSuggestions) {}

  // Emits warn_unsafe_buffer_operation (or the matching note when
  // IsRelatedToDecl) for the given unsafe operation; MsgParam selects the
  // %select variant of the diagnostic text.
  void handleUnsafeOperation(const Stmt *Operation, bool IsRelatedToDecl,
                             ASTContext &Ctx) override {
    SourceLocation Loc;
    SourceRange Range;
    unsigned MsgParam = 0;
    NamedDecl *D = nullptr;
    if (const auto *ASE = dyn_cast<ArraySubscriptExpr>(Val: Operation)) {
      Loc = ASE->getBase()->getExprLoc();
      Range = ASE->getBase()->getSourceRange();
      MsgParam = 2;
    } else if (const auto *BO = dyn_cast<BinaryOperator>(Val: Operation)) {
      BinaryOperator::Opcode Op = BO->getOpcode();
      if (Op == BO_Add || Op == BO_AddAssign || Op == BO_Sub ||
          Op == BO_SubAssign) {
        // Point the diagnostic at the pointer operand of the arithmetic.
        if (BO->getRHS()->getType()->isIntegerType()) {
          Loc = BO->getLHS()->getExprLoc();
          Range = BO->getLHS()->getSourceRange();
        } else {
          Loc = BO->getRHS()->getExprLoc();
          Range = BO->getRHS()->getSourceRange();
        }
        MsgParam = 1;
      }
    } else if (const auto *UO = dyn_cast<UnaryOperator>(Val: Operation)) {
      UnaryOperator::Opcode Op = UO->getOpcode();
      if (Op == UO_PreInc || Op == UO_PreDec || Op == UO_PostInc ||
          Op == UO_PostDec) {
        Loc = UO->getSubExpr()->getExprLoc();
        Range = UO->getSubExpr()->getSourceRange();
        MsgParam = 1;
      }
    } else {
      if (isa<CallExpr>(Val: Operation) || isa<CXXConstructExpr>(Val: Operation)) {
        // note_unsafe_buffer_operation doesn't have this mode yet.
        assert(!IsRelatedToDecl && "Not implemented yet!");
        MsgParam = 3;
      } else if (isa<MemberExpr>(Val: Operation)) {
        // note_unsafe_buffer_operation doesn't have this mode yet.
        assert(!IsRelatedToDecl && "Not implemented yet!");
        auto *ME = cast<MemberExpr>(Val: Operation);
        D = ME->getMemberDecl();
        MsgParam = 5;
      } else if (const auto *ECE = dyn_cast<ExplicitCastExpr>(Val: Operation)) {
        QualType destType = ECE->getType();
        bool destTypeComplete = true;

        if (!isa<PointerType>(Val: destType))
          return;
        destType = destType.getTypePtr()->getPointeeType();
        // This `D` shadows the outer `D`; it is only used to query
        // completeness of the destination record type.
        if (const auto *D = destType->getAsTagDecl())
          destTypeComplete = D->isCompleteDefinition();

        // If destination type is incomplete, it is unsafe to cast to anyway, no
        // need to check its type:
        if (destTypeComplete) {
          const uint64_t dSize = Ctx.getTypeSize(T: destType);
          QualType srcType = ECE->getSubExpr()->getType();

          assert(srcType->isPointerType());

          const uint64_t sSize =
              Ctx.getTypeSize(T: srcType.getTypePtr()->getPointeeType());

          // Casts where the source pointee is at least as large as the
          // destination pointee are not diagnosed.
          if (sSize >= dSize)
            return;
        }
        if (const auto *CE = dyn_cast<CXXMemberCallExpr>(
                Val: ECE->getSubExpr()->IgnoreParens())) {
          D = CE->getMethodDecl();
        }

        if (!D)
          return;

        MsgParam = 4;
      }
      Loc = Operation->getBeginLoc();
      Range = Operation->getSourceRange();
    }
    if (IsRelatedToDecl) {
      assert(!SuggestSuggestions &&
             "Variables blamed for unsafe buffer usage without suggestions!");
      S.Diag(Loc, DiagID: diag::note_unsafe_buffer_operation) << MsgParam << Range;
    } else {
      if (D) {
        S.Diag(Loc, DiagID: diag::warn_unsafe_buffer_operation)
            << MsgParam << D << Range;
      } else {
        S.Diag(Loc, DiagID: diag::warn_unsafe_buffer_operation) << MsgParam << Range;
      }
      if (SuggestSuggestions) {
        S.Diag(Loc, DiagID: diag::note_safe_buffer_usage_suggestions_disabled);
      }
    }
  }

  // Emits a warning for an unsafe libc call; PrintfInfo encodes which
  // variant/argument is unsafe (see UnsafeBufferUsageHandler docs).
  void handleUnsafeLibcCall(const CallExpr *Call, unsigned PrintfInfo,
                            ASTContext &Ctx,
                            const Expr *UnsafeArg = nullptr) override {
    unsigned DiagID = diag::warn_unsafe_buffer_libc_call;
    if (PrintfInfo & 0x8) {
      // The callee is a function with the format attribute. See the
      // documentation of PrintfInfo in UnsafeBufferUsageHandler, and
      // UnsafeLibcFunctionCallGadget::UnsafeKind.
      DiagID = diag::warn_unsafe_buffer_format_attr_call;
      // Clear the flag bit; the remaining bits carry the printf info.
      PrintfInfo ^= 0x8;
    }
    S.Diag(Loc: Call->getBeginLoc(), DiagID)
        << Call->getDirectCallee() // We've checked there is a direct callee
        << Call->getSourceRange();
    if (PrintfInfo > 0) {
      SourceRange R =
          UnsafeArg ? UnsafeArg->getSourceRange() : Call->getSourceRange();
      S.Diag(Loc: R.getBegin(), DiagID: diag::note_unsafe_buffer_printf_call)
          << PrintfInfo << R;
    }
  }

  // Emits warn_unsafe_buffer_usage_in_container for an unsafe two-parameter
  // span constructor.
  void handleUnsafeOperationInContainer(const Stmt *Operation,
                                        bool IsRelatedToDecl,
                                        ASTContext &Ctx) override {
    SourceLocation Loc;
    SourceRange Range;
    unsigned MsgParam = 0;

    // This function only handles SpanTwoParamConstructorGadget so far, which
    // always gives a CXXConstructExpr.
    const auto *CtorExpr = cast<CXXConstructExpr>(Val: Operation);
    Loc = CtorExpr->getLocation();

    S.Diag(Loc, DiagID: diag::warn_unsafe_buffer_usage_in_container);
    if (IsRelatedToDecl) {
      assert(!SuggestSuggestions &&
             "Variables blamed for unsafe buffer usage without suggestions!");
      S.Diag(Loc, DiagID: diag::note_unsafe_buffer_operation) << MsgParam << Range;
    }
  }

  // Emits warn_unsafe_buffer_variable for `Variable`, plus — when fixes are
  // available — a fix-it note covering its whole variable group.
  void handleUnsafeVariableGroup(const VarDecl *Variable,
                                 const VariableGroupsManager &VarGrpMgr,
                                 FixItList &&Fixes, const Decl *D,
                                 const FixitStrategy &VarTargetTypes) override {
    assert(!SuggestSuggestions &&
           "Unsafe buffer usage fixits displayed without suggestions!");
    S.Diag(Loc: Variable->getLocation(), DiagID: diag::warn_unsafe_buffer_variable)
        << Variable << (Variable->getType()->isPointerType() ? 0 : 1)
        << Variable->getSourceRange();
    if (!Fixes.empty()) {
      assert(isa<NamedDecl>(D) &&
             "Fix-its are generated only for `NamedDecl`s");
      const NamedDecl *ND = cast<NamedDecl>(Val: D);
      bool BriefMsg = false;
      // If the variable group involves parameters, the diagnostic message will
      // NOT explain how the variables are grouped as the reason is non-trivial
      // and irrelevant to users' experience:
      const auto VarGroupForVD = VarGrpMgr.getGroupOfVar(Var: Variable, HasParm: &BriefMsg);
      unsigned FixItStrategy = 0;
      switch (VarTargetTypes.lookup(VD: Variable)) {
      case clang::FixitStrategy::Kind::Span:
        FixItStrategy = 0;
        break;
      case clang::FixitStrategy::Kind::Array:
        FixItStrategy = 1;
        break;
      default:
        assert(false && "We support only std::span and std::array");
      };

      const auto &FD =
          S.Diag(Loc: Variable->getLocation(),
                 DiagID: BriefMsg ? diag::note_unsafe_buffer_variable_fixit_together
                         : diag::note_unsafe_buffer_variable_fixit_group);

      FD << Variable << FixItStrategy;
      FD << listVariableGroupAsString(VD: Variable, VarGroupForVD)
         << (VarGroupForVD.size() > 1) << ND;
      for (const auto &F : Fixes) {
        FD << F;
      }
    }

#ifndef NDEBUG
    if (areDebugNotesRequested())
      for (const DebugNote &Note: DebugNotesByVar[Variable])
        S.Diag(Note.first, diag::note_safe_buffer_debug_mode) << Note.second;
#endif
  }

  // Emits warn_unsafe_buffer_usage_unique_ptr_array_access at the statement
  // carried by `Node`.
  void handleUnsafeUniquePtrArrayAccess(const DynTypedNode &Node,
                                        bool IsRelatedToDecl,
                                        ASTContext &Ctx) override {
    SourceLocation Loc;

    Loc = Node.get<Stmt>()->getBeginLoc();
    S.Diag(Loc, DiagID: diag::warn_unsafe_buffer_usage_unique_ptr_array_access)
        << Node.getSourceRange();
  }

  // True if `Loc` falls in a region the preprocessor marked as opted out of
  // safe-buffer checking.
  bool isSafeBufferOptOut(const SourceLocation &Loc) const override {
    return S.PP.isSafeBufferOptOut(SourceMgr: S.getSourceManager(), Loc);
  }

  bool ignoreUnsafeBufferInContainer(const SourceLocation &Loc) const override {
    return S.Diags.isIgnored(DiagID: diag::warn_unsafe_buffer_usage_in_container, Loc);
  }

  bool ignoreUnsafeBufferInLibcCall(const SourceLocation &Loc) const override {
    return S.Diags.isIgnored(DiagID: diag::warn_unsafe_buffer_libc_call, Loc);
  }

  bool ignoreUnsafeBufferInStaticSizedArray(
      const SourceLocation &Loc) const override {
    return S.Diags.isIgnored(
        DiagID: diag::warn_unsafe_buffer_usage_in_static_sized_array, Loc);
  }

  // Returns the text representation of clang::unsafe_buffer_usage attribute.
  // `WSSuffix` holds customized "white-space"s, e.g., newline or whitespace
  // characters.
  std::string
  getUnsafeBufferUsageAttributeTextAt(SourceLocation Loc,
                                      StringRef WSSuffix = "") const override {
    Preprocessor &PP = S.getPreprocessor();
    TokenValue ClangUnsafeBufferUsageTokens[] = {
        tok::l_square,
        tok::l_square,
        PP.getIdentifierInfo(Name: "clang"),
        tok::coloncolon,
        PP.getIdentifierInfo(Name: "unsafe_buffer_usage"),
        tok::r_square,
        tok::r_square};

    StringRef MacroName;

    // The returned macro (it returns) is guaranteed not to be function-like:
    MacroName = PP.getLastMacroWithSpelling(Loc, Tokens: ClangUnsafeBufferUsageTokens);
    if (MacroName.empty())
      MacroName = "[[clang::unsafe_buffer_usage]]";
    return MacroName.str() + WSSuffix.str();
  }
};
2655} // namespace
2656
2657//===----------------------------------------------------------------------===//
2658// AnalysisBasedWarnings - Worker object used by Sema to execute analysis-based
2659// warnings on a function, method, or block.
2660//===----------------------------------------------------------------------===//
2661
2662sema::AnalysisBasedWarnings::Policy::Policy() {
2663 enableCheckFallThrough = 1;
2664 enableCheckUnreachable = 0;
2665 enableThreadSafetyAnalysis = 0;
2666 enableConsumedAnalysis = 0;
2667}
2668
/// InterProceduralData aims to be a storage of whatever data should be passed
/// between analyses of different functions.
///
/// At the moment, its primary goal is to make the information gathered during
/// the analysis of the blocks available during the analysis of the enclosing
/// function. This is important due to the fact that blocks are analyzed before
/// the enclosed function is even parsed fully, so it is not viable to access
/// anything in the outer scope while analyzing the block. On the other hand,
/// re-building CFG for blocks and re-analyzing them when we do have all the
/// information (i.e. during the analysis of the enclosing function) seems to be
/// ill-designed.
class sema::AnalysisBasedWarnings::InterProceduralData {
public:
  // It is important to analyze blocks within functions because it's a very
  // common pattern to capture completion handler parameters by blocks.
  // Shared state for the "called once" analysis (see CalledOnceCheck.h).
  CalledOnceInterProceduralData CalledOnceData;
};
2686
2687template <typename... Ts>
2688static bool areAnyEnabled(DiagnosticsEngine &D, SourceLocation Loc,
2689 Ts... Diags) {
2690 return (!D.isIgnored(DiagID: Diags, Loc) || ...);
2691}
2692
// All statistics counters start at zero. IPData is allocated eagerly so that
// block analyses can deposit inter-procedural data before the enclosing
// function is analyzed (see InterProceduralData above).
sema::AnalysisBasedWarnings::AnalysisBasedWarnings(Sema &s)
    : S(s), IPData(std::make_unique<InterProceduralData>()),
      NumFunctionsAnalyzed(0), NumFunctionsWithBadCFGs(0), NumCFGBlocks(0),
      MaxCFGBlocksPerFunction(0), NumUninitAnalysisFunctions(0),
      NumUninitAnalysisVariables(0), MaxUninitAnalysisVariablesPerFunction(0),
      NumUninitAnalysisBlockVisits(0),
      MaxUninitAnalysisBlockVisitsPerFunction(0) {
}
2701
// We need this here for unique_ptr with forward declared class: the
// destructor of the owning unique_ptr<InterProceduralData> can only be
// instantiated where the class is complete, i.e. in this file.
sema::AnalysisBasedWarnings::~AnalysisBasedWarnings() = default;
2704
// Compute the analysis policy in effect at `Loc`: each opt-in check is
// enabled when either a policy override requested it, or when at least one
// of its associated diagnostics is active at that location.
// (enableCheckFallThrough keeps its constructor default: on.)
sema::AnalysisBasedWarnings::Policy
sema::AnalysisBasedWarnings::getPolicyInEffectAt(SourceLocation Loc) {
  using namespace diag;
  DiagnosticsEngine &D = S.getDiagnostics();
  Policy P;

  // Note: The enabled checks should be kept in sync with the switch in
  // SemaPPCallbacks::PragmaDiagnostic().
  P.enableCheckUnreachable =
      PolicyOverrides.enableCheckUnreachable ||
      areAnyEnabled(D, Loc, Diags: warn_unreachable, Diags: warn_unreachable_break,
                    Diags: warn_unreachable_return, Diags: warn_unreachable_loop_increment);

  P.enableThreadSafetyAnalysis = PolicyOverrides.enableThreadSafetyAnalysis ||
                                 areAnyEnabled(D, Loc, Diags: warn_double_lock);

  P.enableConsumedAnalysis = PolicyOverrides.enableConsumedAnalysis ||
                             areAnyEnabled(D, Loc, Diags: warn_use_in_invalid_state);
  return P;
}
2725
2726void sema::AnalysisBasedWarnings::clearOverrides() {
2727 PolicyOverrides.enableCheckUnreachable = false;
2728 PolicyOverrides.enableConsumedAnalysis = false;
2729 PolicyOverrides.enableThreadSafetyAnalysis = false;
2730}
2731
2732static void flushDiagnostics(Sema &S, const sema::FunctionScopeInfo *fscope) {
2733 for (const auto &D : fscope->PossiblyUnreachableDiags)
2734 S.Diag(Loc: D.Loc, PD: D.PD);
2735}
2736
// Emit the delayed diagnostics in [PUDs.first, PUDs.second) that are not
// provably unreachable. Each diagnostic carries the statements whose
// reachability it depends on; a diagnostic is dropped only when some such
// statement is known to be unreachable from the CFG entry. If no CFG can be
// built, all diagnostics are emitted unconditionally.
template <typename Iterator>
static void emitPossiblyUnreachableDiags(Sema &S, AnalysisDeclContext &AC,
                                         std::pair<Iterator, Iterator> PUDs) {

  if (PUDs.first == PUDs.second)
    return;

  // Register all dependent statements as forced block expressions BEFORE the
  // CFG is built below, so each one gets its own CFG block we can query.
  for (auto I = PUDs.first; I != PUDs.second; ++I) {
    for (const Stmt *S : I->Stmts)
      AC.registerForcedBlockExpression(stmt: S);
  }

  if (AC.getCFG()) {
    CFGReverseBlockReachabilityAnalysis *Analysis =
        AC.getCFGReachablityAnalysis();

    for (auto I = PUDs.first; I != PUDs.second; ++I) {
      const auto &D = *I;
      // Emit unless some dependent statement is provably unreachable.
      if (llvm::all_of(D.Stmts, [&](const Stmt *St) {
            const CFGBlock *Block = AC.getBlockForRegisteredExpression(stmt: St);
            // FIXME: We should be able to assert that block is non-null, but
            // the CFG analysis can skip potentially-evaluated expressions in
            // edge cases; see test/Sema/vla-2.c.
            if (Block && Analysis)
              if (!Analysis->isReachable(Src: &AC.getCFG()->getEntry(), Dst: Block))
                return false;
            return true;
          })) {
        S.Diag(D.Loc, D.PD);
      }
    }
  } else {
    // No CFG: we cannot prove anything unreachable, so emit everything.
    for (auto I = PUDs.first; I != PUDs.second; ++I)
      S.Diag(I->Loc, I->PD);
  }
}
2773
2774void sema::AnalysisBasedWarnings::registerVarDeclWarning(
2775 VarDecl *VD, clang::sema::PossiblyUnreachableDiag PUD) {
2776 VarDeclPossiblyUnreachableDiags.emplace(args&: VD, args&: PUD);
2777}
2778
// Emit (or drop) the delayed diagnostics registered for `VD` via
// registerVarDeclWarning. Builds a dedicated AnalysisDeclContext for the
// variable so reachability of the dependent statements can be checked.
void sema::AnalysisBasedWarnings::issueWarningsForRegisteredVarDecl(
    VarDecl *VD) {
  if (!llvm::is_contained(Range&: VarDeclPossiblyUnreachableDiags, Element: VD))
    return;

  AnalysisDeclContext AC(/*Mgr=*/nullptr, VD);

  // CFG build options mirror the per-function setup in IssueWarnings below.
  AC.getCFGBuildOptions().PruneTriviallyFalseEdges = true;
  AC.getCFGBuildOptions().AddEHEdges = false;
  AC.getCFGBuildOptions().AddInitializers = true;
  AC.getCFGBuildOptions().AddImplicitDtors = true;
  AC.getCFGBuildOptions().AddTemporaryDtors = true;
  AC.getCFGBuildOptions().AddCXXNewAllocator = false;
  AC.getCFGBuildOptions().AddCXXDefaultInitExprInCtors = true;

  // Adapt the multimap's (VD -> diag) range into a plain diag iterator range
  // for emitPossiblyUnreachableDiags.
  auto Range = VarDeclPossiblyUnreachableDiags.equal_range(x: VD);
  auto SecondRange =
      llvm::make_second_range(c: llvm::make_range(x: Range.first, y: Range.second));
  emitPossiblyUnreachableDiags(
      S, AC, PUDs: std::make_pair(x: SecondRange.begin(), y: SecondRange.end()));
}
2800
// An AST Visitor that calls a callback function on each callable DEFINITION
// that is NOT in a dependent context:
class CallableVisitor : public DynamicRecursiveASTVisitor {
private:
  // Invoked once per non-dependent callable definition found.
  llvm::function_ref<void(const Decl *)> Callback;
  // Module owning the current translation unit; visitation is restricted to
  // declarations owned by this module (see TraverseDecl).
  const Module *const TUModule;

public:
  CallableVisitor(llvm::function_ref<void(const Decl *)> Callback,
                  const Module *const TUModule)
      : Callback(Callback), TUModule(TUModule) {
    // Visit instantiated templates (their bodies are concrete), but skip
    // compiler-synthesized code.
    ShouldVisitTemplateInstantiations = true;
    ShouldVisitImplicitCode = false;
  }

  bool TraverseDecl(Decl *Node) override {
    // For performance reasons, only validate the current translation unit's
    // module, and not modules it depends on.
    // See https://issues.chromium.org/issues/351909443 for details.
    if (Node && Node->getOwningModule() == TUModule)
      return DynamicRecursiveASTVisitor::TraverseDecl(D: Node);
    return true;
  }

  bool VisitFunctionDecl(FunctionDecl *Node) override {
    if (cast<DeclContext>(Val: Node)->isDependentContext())
      return true; // Not to analyze dependent decl
    // `FunctionDecl->hasBody()` returns true if the function has a body
    // somewhere defined. But we want to know if this `Node` has a body
    // child. So we use `doesThisDeclarationHaveABody`:
    if (Node->doesThisDeclarationHaveABody())
      Callback(Node);
    return true;
  }

  bool VisitBlockDecl(BlockDecl *Node) override {
    if (cast<DeclContext>(Val: Node)->isDependentContext())
      return true; // Not to analyze dependent decl
    Callback(Node);
    return true;
  }

  bool VisitObjCMethodDecl(ObjCMethodDecl *Node) override {
    if (cast<DeclContext>(Val: Node)->isDependentContext())
      return true; // Not to analyze dependent decl
    if (Node->hasBody())
      Callback(Node);
    return true;
  }

  bool VisitLambdaExpr(LambdaExpr *Node) override {
    // A lambda's body lives on its call operator; treat it like any other
    // function definition.
    return VisitFunctionDecl(Node: Node->getCallOperator());
  }
};
2855
namespace clang::lifetimes {
namespace {
// Bridges the lifetime-safety analysis back into Sema: each report* /
// suggest* callback turns an analysis finding into the corresponding
// warning plus explanatory notes, and the suggest* callbacks additionally
// attach [[clang::lifetimebound]] fix-it hints.
class LifetimeSafetySemaHelperImpl : public LifetimeSafetySemaHelper {

public:
  LifetimeSafetySemaHelperImpl(Sema &S) : S(S) {}

  // Warn at the use site of an object destroyed at `FreeLoc`; notes mark
  // the move (if any), the destruction point, and the use.
  void reportUseAfterFree(const Expr *IssueExpr, const Expr *UseExpr,
                          const Expr *MovedExpr, SourceLocation FreeLoc,
                          Confidence) override {
    S.Diag(Loc: IssueExpr->getExprLoc(),
           DiagID: MovedExpr ? diag::warn_lifetime_safety_use_after_scope_moved
                     : diag::warn_lifetime_safety_use_after_scope)
        << IssueExpr->getSourceRange();
    if (MovedExpr)
      S.Diag(Loc: MovedExpr->getExprLoc(), DiagID: diag::note_lifetime_safety_moved_here)
          << MovedExpr->getSourceRange();
    S.Diag(Loc: FreeLoc, DiagID: diag::note_lifetime_safety_destroyed_here);
    S.Diag(Loc: UseExpr->getExprLoc(), DiagID: diag::note_lifetime_safety_used_here)
        << UseExpr->getSourceRange();
  }

  // Warn about returning the address of stack memory; notes mark the move
  // (if any) and the return statement.
  void reportUseAfterReturn(const Expr *IssueExpr, const Expr *ReturnExpr,
                            const Expr *MovedExpr, SourceLocation ExpiryLoc,
                            Confidence) override {
    S.Diag(Loc: IssueExpr->getExprLoc(),
           DiagID: MovedExpr ? diag::warn_lifetime_safety_return_stack_addr_moved
                     : diag::warn_lifetime_safety_return_stack_addr)
        << IssueExpr->getSourceRange();
    if (MovedExpr)
      S.Diag(Loc: MovedExpr->getExprLoc(), DiagID: diag::note_lifetime_safety_moved_here)
          << MovedExpr->getSourceRange();
    S.Diag(Loc: ReturnExpr->getExprLoc(), DiagID: diag::note_lifetime_safety_returned_here)
        << ReturnExpr->getSourceRange();
  }

  // Warn that a field ends up dangling; notes mark the move (if any) and
  // the field's declaration.
  void reportDanglingField(const Expr *IssueExpr,
                           const FieldDecl *DanglingField,
                           const Expr *MovedExpr,
                           SourceLocation ExpiryLoc) override {
    S.Diag(Loc: IssueExpr->getExprLoc(),
           DiagID: MovedExpr ? diag::warn_lifetime_safety_dangling_field_moved
                     : diag::warn_lifetime_safety_dangling_field)
        << IssueExpr->getSourceRange();
    if (MovedExpr)
      S.Diag(Loc: MovedExpr->getExprLoc(), DiagID: diag::note_lifetime_safety_moved_here)
          << MovedExpr->getSourceRange();
    S.Diag(Loc: DanglingField->getLocation(),
           DiagID: diag::note_lifetime_safety_dangling_field_here)
        << DanglingField->getEndLoc();
  }

  // Expression variant: the `false` selects the non-parameter wording of
  // warn_lifetime_safety_invalidation.
  void reportUseAfterInvalidation(const Expr *IssueExpr, const Expr *UseExpr,
                                  const Expr *InvalidationExpr) override {
    S.Diag(Loc: IssueExpr->getExprLoc(), DiagID: diag::warn_lifetime_safety_invalidation)
        << false << IssueExpr->getSourceRange();
    S.Diag(Loc: InvalidationExpr->getExprLoc(),
           DiagID: diag::note_lifetime_safety_invalidated_here)
        << InvalidationExpr->getSourceRange();
    S.Diag(Loc: UseExpr->getExprLoc(), DiagID: diag::note_lifetime_safety_used_here)
        << UseExpr->getSourceRange();
  }
  // Parameter variant: the `true` selects the parameter wording of
  // warn_lifetime_safety_invalidation.
  void reportUseAfterInvalidation(const ParmVarDecl *PVD, const Expr *UseExpr,
                                  const Expr *InvalidationExpr) override {
    S.Diag(Loc: PVD->getSourceRange().getBegin(),
           DiagID: diag::warn_lifetime_safety_invalidation)
        << true << PVD->getSourceRange();
    S.Diag(Loc: InvalidationExpr->getExprLoc(),
           DiagID: diag::note_lifetime_safety_invalidated_here)
        << InvalidationExpr->getSourceRange();
    S.Diag(Loc: UseExpr->getExprLoc(), DiagID: diag::note_lifetime_safety_used_here)
        << UseExpr->getSourceRange();
  }

  // Suggest annotating `ParmToAnnotate` with [[clang::lifetimebound]] via a
  // fix-it; `Scope` picks the cross-TU vs intra-TU diagnostic wording.
  void suggestLifetimeboundToParmVar(SuggestionScope Scope,
                                     const ParmVarDecl *ParmToAnnotate,
                                     const Expr *EscapeExpr) override {
    unsigned DiagID =
        (Scope == SuggestionScope::CrossTU)
            ? diag::warn_lifetime_safety_cross_tu_param_suggestion
            : diag::warn_lifetime_safety_intra_tu_param_suggestion;
    SourceLocation InsertionPoint = Lexer::getLocForEndOfToken(
        Loc: ParmToAnnotate->getEndLoc(), Offset: 0, SM: S.getSourceManager(), LangOpts: S.getLangOpts());
    StringRef FixItText = " [[clang::lifetimebound]]";
    if (!ParmToAnnotate->getIdentifier()) {
      // For unnamed parameters, placing attributes after the type would be
      // parsed as a type attribute, not a parameter attribute.
      InsertionPoint = ParmToAnnotate->getBeginLoc();
      FixItText = "[[clang::lifetimebound]] ";
    }
    S.Diag(Loc: ParmToAnnotate->getBeginLoc(), DiagID)
        << ParmToAnnotate->getSourceRange()
        << FixItHint::CreateInsertion(InsertionLoc: InsertionPoint, Code: FixItText);
    S.Diag(Loc: EscapeExpr->getBeginLoc(),
           DiagID: diag::note_lifetime_safety_suggestion_returned_here)
        << EscapeExpr->getSourceRange();
  }

  // Suggest annotating the implicit `this` of method `MD` with
  // [[clang::lifetimebound]], inserting after the function declarator.
  void suggestLifetimeboundToImplicitThis(SuggestionScope Scope,
                                          const CXXMethodDecl *MD,
                                          const Expr *EscapeExpr) override {
    unsigned DiagID = (Scope == SuggestionScope::CrossTU)
                          ? diag::warn_lifetime_safety_cross_tu_this_suggestion
                          : diag::warn_lifetime_safety_intra_tu_this_suggestion;
    const auto MDL = MD->getTypeSourceInfo()->getTypeLoc();
    SourceLocation InsertionPoint = Lexer::getLocForEndOfToken(
        Loc: MDL.getEndLoc(), Offset: 0, SM: S.getSourceManager(), LangOpts: S.getLangOpts());
    if (const auto *FPT = MD->getType()->getAs<FunctionProtoType>();
        FPT && FPT->hasTrailingReturn()) {
      // For trailing return types, 'getEndLoc()' includes the return type
      // after '->', placing the attribute in an invalid position.
      // Instead use 'getLocalRangeEnd()' which gives the '->' location
      // for trailing returns, so find the last token before it.
      const auto FTL = MDL.getAs<FunctionTypeLoc>();
      assert(FTL);
      InsertionPoint = Lexer::getLocForEndOfToken(
          Loc: Lexer::findPreviousToken(Loc: FTL.getLocalRangeEnd(), SM: S.getSourceManager(),
                                   LangOpts: S.getLangOpts(),
                                   /*IncludeComments=*/false)
              ->getLocation(),
          Offset: 0, SM: S.getSourceManager(), LangOpts: S.getLangOpts());
    }
    S.Diag(Loc: InsertionPoint, DiagID)
        << MD->getNameInfo().getSourceRange()
        << FixItHint::CreateInsertion(InsertionLoc: InsertionPoint,
                                      Code: " [[clang::lifetimebound]]");
    S.Diag(Loc: EscapeExpr->getBeginLoc(),
           DiagID: diag::note_lifetime_safety_suggestion_returned_here)
        << EscapeExpr->getSourceRange();
  }

  // noexcept-escape via an expression: warn at the parameter, note the
  // escaping expression.
  void reportNoescapeViolation(const ParmVarDecl *ParmWithNoescape,
                               const Expr *EscapeExpr) override {
    S.Diag(Loc: ParmWithNoescape->getBeginLoc(),
           DiagID: diag::warn_lifetime_safety_noescape_escapes)
        << ParmWithNoescape->getSourceRange();

    S.Diag(Loc: EscapeExpr->getBeginLoc(),
           DiagID: diag::note_lifetime_safety_suggestion_returned_here)
        << EscapeExpr->getSourceRange();
  }

  // Overload for escapes into a field: same warning, field-specific note.
  void reportNoescapeViolation(const ParmVarDecl *ParmWithNoescape,
                               const FieldDecl *EscapeField) override {
    S.Diag(Loc: ParmWithNoescape->getBeginLoc(),
           DiagID: diag::warn_lifetime_safety_noescape_escapes)
        << ParmWithNoescape->getSourceRange();

    S.Diag(Loc: EscapeField->getLocation(),
           DiagID: diag::note_lifetime_safety_escapes_to_field_here)
        << EscapeField->getEndLoc();
  }

  // Forward to Sema to actually attach the lifetimebound attribute to the
  // method's implicit `this`.
  void addLifetimeBoundToImplicitThis(const CXXMethodDecl *MD) override {
    S.addLifetimeBoundToImplicitThis(MD: const_cast<CXXMethodDecl *>(MD));
  }

private:
  Sema &S;
};
} // namespace
} // namespace clang::lifetimes
3017
// Run the lifetime-safety analysis over every defined function in the
// translation unit, visiting the call graph in post-order (callees before
// callers). Findings are reported through LifetimeSafetySemaHelperImpl.
static void
LifetimeSafetyTUAnalysis(Sema &S, TranslationUnitDecl *TU,
                         clang::lifetimes::LifetimeSafetyStats &LSStats) {
  llvm::TimeTraceScope TimeProfile("LifetimeSafetyTUAnalysis");
  CallGraph CG;
  CG.addToCallGraph(D: TU);
  lifetimes::LifetimeSafetySemaHelperImpl SemaHelper(S);
  for (auto *Node : llvm::post_order(G: &CG)) {
    const clang::FunctionDecl *CanonicalFD =
        dyn_cast_or_null<clang::FunctionDecl>(Val: Node->getDecl());
    if (!CanonicalFD)
      continue;
    // Only analyze functions that have a definition in this TU.
    const FunctionDecl *FD = CanonicalFD->getDefinition();
    if (!FD)
      continue;
    AnalysisDeclContext AC(nullptr, FD);
    // The analysis wants a fully linearized CFG with lifetime markers,
    // including edges that would normally be pruned as trivially false.
    AC.getCFGBuildOptions().PruneTriviallyFalseEdges = false;
    AC.getCFGBuildOptions().AddLifetime = true;
    AC.getCFGBuildOptions().AddParameterLifetimes = true;
    AC.getCFGBuildOptions().setAllAlwaysAdd();
    if (AC.getCFG())
      runLifetimeSafetyAnalysis(AC, SemaHelper: &SemaHelper, Stats&: LSStats, CollectStats: S.CollectStats);
  }
}
3042
// Translation-unit entry point: runs the analyses that need whole-TU
// reasoning (unsafe-buffer usage, and optionally the TU-wide lifetime-safety
// analysis). Per-function analyses run through the other IssueWarnings
// overload below.
void clang::sema::AnalysisBasedWarnings::IssueWarnings(
    TranslationUnitDecl *TU) {
  if (!TU)
    return; // This is unexpected, give up quietly.

  DiagnosticsEngine &Diags = S.getDiagnostics();

  if (S.hasUncompilableErrorOccurred() || Diags.getIgnoreAllWarnings())
    // exit if having uncompilable errors or ignoring all warnings:
    return;

  DiagnosticOptions &DiagOpts = Diags.getDiagnosticOptions();

  // UnsafeBufferUsage analysis settings.
  bool UnsafeBufferUsageCanEmitSuggestions = S.getLangOpts().CPlusPlus20;
  bool UnsafeBufferUsageShouldEmitSuggestions = // Should != Can.
      UnsafeBufferUsageCanEmitSuggestions &&
      DiagOpts.ShowSafeBufferUsageSuggestions;
  bool UnsafeBufferUsageShouldSuggestSuggestions =
      UnsafeBufferUsageCanEmitSuggestions &&
      !DiagOpts.ShowSafeBufferUsageSuggestions;
  UnsafeBufferUsageReporter R(S, UnsafeBufferUsageShouldSuggestSuggestions);

  // The Callback function that performs analyses:
  auto CallAnalyzers = [&](const Decl *Node) -> void {
    // Functions explicitly marked [[clang::unsafe_buffer_usage]] are exempt.
    if (Node->hasAttr<UnsafeBufferUsageAttr>())
      return;

    // Perform unsafe buffer usage analysis:
    if (!Diags.isIgnored(DiagID: diag::warn_unsafe_buffer_operation,
                         Loc: Node->getBeginLoc()) ||
        !Diags.isIgnored(DiagID: diag::warn_unsafe_buffer_variable,
                         Loc: Node->getBeginLoc()) ||
        !Diags.isIgnored(DiagID: diag::warn_unsafe_buffer_usage_in_container,
                         Loc: Node->getBeginLoc()) ||
        !Diags.isIgnored(DiagID: diag::warn_unsafe_buffer_libc_call,
                         Loc: Node->getBeginLoc())) {
      clang::checkUnsafeBufferUsage(D: Node, Handler&: R,
                                    EmitSuggestions: UnsafeBufferUsageShouldEmitSuggestions);
    }

    // More analysis ...
  };
  // Emit per-function analysis-based warnings that require the whole-TU
  // reasoning. Check if any of them is enabled at all before scanning the AST:
  if (!Diags.isIgnored(DiagID: diag::warn_unsafe_buffer_operation, Loc: SourceLocation()) ||
      !Diags.isIgnored(DiagID: diag::warn_unsafe_buffer_variable, Loc: SourceLocation()) ||
      !Diags.isIgnored(DiagID: diag::warn_unsafe_buffer_usage_in_container,
                       Loc: SourceLocation()) ||
      (!Diags.isIgnored(DiagID: diag::warn_unsafe_buffer_libc_call, Loc: SourceLocation()) &&
       S.getLangOpts().CPlusPlus /* only warn about libc calls in C++ */)) {
    CallableVisitor(CallAnalyzers, TU->getOwningModule())
        .TraverseTranslationUnitDecl(D: TU);
  }

  // TU-wide lifetime safety is its own mode; the per-function overload skips
  // the analysis when EnableLifetimeSafetyTUAnalysis is set.
  if (S.getLangOpts().EnableLifetimeSafety && S.getLangOpts().CPlusPlus &&
      S.getLangOpts().EnableLifetimeSafetyTUAnalysis)
    LifetimeSafetyTUAnalysis(S, TU, LSStats);
}
3102
// Per-function entry point: given the policy `P` computed for this function,
// build one AnalysisDeclContext/CFG for `D`'s body and run every enabled
// CFG-based analysis over it (fall-through, unreachable code, thread safety,
// consumed, uninitialized variables, lifetime safety, called-once,
// switch fall-through annotation, weak-object reuse, infinite recursion,
// throw-in-noexcept), then record CFG statistics.
void clang::sema::AnalysisBasedWarnings::IssueWarnings(
    sema::AnalysisBasedWarnings::Policy P, sema::FunctionScopeInfo *fscope,
    const Decl *D, QualType BlockType) {

  // We avoid doing analysis-based warnings when there are errors for
  // two reasons:
  // (1) The CFGs often can't be constructed (if the body is invalid), so
  //     don't bother trying.
  // (2) The code already has problems; running the analysis just takes more
  //     time.
  DiagnosticsEngine &Diags = S.getDiagnostics();

  // Do not do any analysis if we are going to just ignore them.
  if (Diags.getIgnoreAllWarnings() ||
      (Diags.getSuppressSystemWarnings() &&
       S.SourceMgr.isInSystemHeader(Loc: D->getLocation())))
    return;

  // For code in dependent contexts, we'll do this at instantiation time.
  if (cast<DeclContext>(Val: D)->isDependentContext())
    return;

  if (S.hasUncompilableErrorOccurred()) {
    // Flush out any possibly unreachable diagnostics.
    flushDiagnostics(S, fscope);
    return;
  }

  const Stmt *Body = D->getBody();
  assert(Body);

  // Construct the analysis context with the specified CFG build options.
  AnalysisDeclContext AC(/* AnalysisDeclContextManager */ nullptr, D);

  // Don't generate EH edges for CallExprs as we'd like to avoid the n^2
  // explosion for destructors that can result and the compile time hit.
  AC.getCFGBuildOptions().PruneTriviallyFalseEdges = true;
  AC.getCFGBuildOptions().AddEHEdges = false;
  AC.getCFGBuildOptions().AddInitializers = true;
  AC.getCFGBuildOptions().AddImplicitDtors = true;
  AC.getCFGBuildOptions().AddParameterLifetimes = true;
  AC.getCFGBuildOptions().AddTemporaryDtors = true;
  AC.getCFGBuildOptions().AddCXXNewAllocator = false;
  AC.getCFGBuildOptions().AddCXXDefaultInitExprInCtors = true;

  // Lifetime safety runs per-function only when at least one of its
  // diagnostics is live here and TU-wide mode is not selected.
  bool IsLifetimeSafetyDiagnosticEnabled =
      !Diags.isIgnored(DiagID: diag::warn_lifetime_safety_use_after_scope,
                       Loc: D->getBeginLoc()) ||
      !Diags.isIgnored(DiagID: diag::warn_lifetime_safety_use_after_scope_moved,
                       Loc: D->getBeginLoc()) ||
      !Diags.isIgnored(DiagID: diag::warn_lifetime_safety_return_stack_addr,
                       Loc: D->getBeginLoc()) ||
      !Diags.isIgnored(DiagID: diag::warn_lifetime_safety_return_stack_addr_moved,
                       Loc: D->getBeginLoc()) ||
      !Diags.isIgnored(DiagID: diag::warn_lifetime_safety_invalidation,
                       Loc: D->getBeginLoc()) ||
      !Diags.isIgnored(DiagID: diag::warn_lifetime_safety_noescape_escapes,
                       Loc: D->getBeginLoc());
  bool EnableLifetimeSafetyAnalysis =
      S.getLangOpts().EnableLifetimeSafety &&
      !S.getLangOpts().EnableLifetimeSafetyTUAnalysis &&
      IsLifetimeSafetyDiagnosticEnabled;

  // Force that certain expressions appear as CFGElements in the CFG. This
  // is used to speed up various analyses.
  // FIXME: This isn't the right factoring. This is here for initial
  // prototyping, but we need a way for analyses to say what expressions they
  // expect to always be CFGElements and then fill in the BuildOptions
  // appropriately. This is essentially a layering violation.
  if (P.enableCheckUnreachable || P.enableThreadSafetyAnalysis ||
      P.enableConsumedAnalysis || EnableLifetimeSafetyAnalysis) {
    // Unreachable code analysis and thread safety require a linearized CFG.
    AC.getCFGBuildOptions().setAllAlwaysAdd();
  } else {
    AC.getCFGBuildOptions()
        .setAlwaysAdd(stmtClass: Stmt::BinaryOperatorClass)
        .setAlwaysAdd(stmtClass: Stmt::CompoundAssignOperatorClass)
        .setAlwaysAdd(stmtClass: Stmt::BlockExprClass)
        .setAlwaysAdd(stmtClass: Stmt::CStyleCastExprClass)
        .setAlwaysAdd(stmtClass: Stmt::DeclRefExprClass)
        .setAlwaysAdd(stmtClass: Stmt::ImplicitCastExprClass)
        .setAlwaysAdd(stmtClass: Stmt::UnaryOperatorClass);
  }
  if (EnableLifetimeSafetyAnalysis)
    AC.getCFGBuildOptions().AddLifetime = true;

  // Install the logical handler.
  std::optional<LogicalErrorHandler> LEH;
  if (LogicalErrorHandler::hasActiveDiagnostics(Diags, Loc: D->getBeginLoc())) {
    LEH.emplace(args&: S);
    AC.getCFGBuildOptions().Observer = &*LEH;
  }

  // Emit delayed diagnostics.
  auto &PUDs = fscope->PossiblyUnreachableDiags;
  emitPossiblyUnreachableDiags(S, AC, PUDs: std::make_pair(x: PUDs.begin(), y: PUDs.end()));

  // Warning: check missing 'return'
  if (P.enableCheckFallThrough) {
    // Pick the wording appropriate for a block, lambda call operator,
    // coroutine, or plain function.
    const CheckFallThroughDiagnostics &CD =
        (isa<BlockDecl>(Val: D) ? CheckFallThroughDiagnostics::MakeForBlock()
         : (isa<CXXMethodDecl>(Val: D) &&
            cast<CXXMethodDecl>(Val: D)->getOverloadedOperator() == OO_Call &&
            cast<CXXMethodDecl>(Val: D)->getParent()->isLambda())
             ? CheckFallThroughDiagnostics::MakeForLambda()
             : (fscope->isCoroutine()
                    ? CheckFallThroughDiagnostics::MakeForCoroutine(Func: D)
                    : CheckFallThroughDiagnostics::MakeForFunction(S, Func: D)));
    CheckFallThroughForBody(S, D, Body, BlockType, CD, AC);
  }

  // Warning: check for unreachable code
  if (P.enableCheckUnreachable) {
    // Only check for unreachable code on non-template instantiations.
    // Different template instantiations can effectively change the control-flow
    // and it is very difficult to prove that a snippet of code in a template
    // is unreachable for all instantiations.
    bool isTemplateInstantiation = false;
    if (const FunctionDecl *Function = dyn_cast<FunctionDecl>(Val: D))
      isTemplateInstantiation = Function->isTemplateInstantiation();
    if (!isTemplateInstantiation)
      CheckUnreachable(S, AC);
  }

  // Check for thread safety violations
  if (P.enableThreadSafetyAnalysis) {
    SourceLocation FL = AC.getDecl()->getLocation();
    SourceLocation FEL = AC.getDecl()->getEndLoc();
    threadSafety::ThreadSafetyReporter Reporter(S, FL, FEL);
    if (!Diags.isIgnored(DiagID: diag::warn_thread_safety_beta, Loc: D->getBeginLoc()))
      Reporter.setIssueBetaWarnings(true);
    if (!Diags.isIgnored(DiagID: diag::warn_thread_safety_verbose, Loc: D->getBeginLoc()))
      Reporter.setVerbose(true);

    threadSafety::runThreadSafetyAnalysis(AC, Handler&: Reporter,
                                          Bset: &S.ThreadSafetyDeclCache);
    Reporter.emitDiagnostics();
  }

  // Check for violations of consumed properties.
  if (P.enableConsumedAnalysis) {
    consumed::ConsumedWarningsHandler WarningHandler(S);
    consumed::ConsumedAnalyzer Analyzer(WarningHandler);
    Analyzer.run(AC);
  }

  // Uninitialized-variable analysis, gated on any of its diagnostics being
  // live at this function.
  if (!Diags.isIgnored(DiagID: diag::warn_uninit_var, Loc: D->getBeginLoc()) ||
      !Diags.isIgnored(DiagID: diag::warn_sometimes_uninit_var, Loc: D->getBeginLoc()) ||
      !Diags.isIgnored(DiagID: diag::warn_maybe_uninit_var, Loc: D->getBeginLoc()) ||
      !Diags.isIgnored(DiagID: diag::warn_uninit_const_reference, Loc: D->getBeginLoc()) ||
      !Diags.isIgnored(DiagID: diag::warn_uninit_const_pointer, Loc: D->getBeginLoc())) {
    if (CFG *cfg = AC.getCFG()) {
      UninitValsDiagReporter reporter(S);
      UninitVariablesAnalysisStats stats;
      std::memset(s: &stats, c: 0, n: sizeof(UninitVariablesAnalysisStats));
      runUninitializedVariablesAnalysis(dc: *cast<DeclContext>(Val: D), cfg: *cfg, ac&: AC,
                                        handler&: reporter, stats);

      // Fold this function's numbers into the -print-stats accumulators.
      if (S.CollectStats && stats.NumVariablesAnalyzed > 0) {
        ++NumUninitAnalysisFunctions;
        NumUninitAnalysisVariables += stats.NumVariablesAnalyzed;
        NumUninitAnalysisBlockVisits += stats.NumBlockVisits;
        MaxUninitAnalysisVariablesPerFunction =
            std::max(a: MaxUninitAnalysisVariablesPerFunction,
                     b: stats.NumVariablesAnalyzed);
        MaxUninitAnalysisBlockVisitsPerFunction =
            std::max(a: MaxUninitAnalysisBlockVisitsPerFunction,
                     b: stats.NumBlockVisits);
      }
    }
  }

  // TODO: Enable lifetime safety analysis for other languages once it is
  // stable.
  if (EnableLifetimeSafetyAnalysis && S.getLangOpts().CPlusPlus) {
    if (AC.getCFG()) {
      lifetimes::LifetimeSafetySemaHelperImpl LifetimeSafetySemaHelper(S);
      lifetimes::runLifetimeSafetyAnalysis(AC, SemaHelper: &LifetimeSafetySemaHelper,
                                           Stats&: LSStats, CollectStats: S.CollectStats);
    }
  }
  // Check for violations of "called once" parameter properties.
  if (S.getLangOpts().ObjC && !S.getLangOpts().CPlusPlus &&
      shouldAnalyzeCalledOnceParameters(Diags, At: D->getBeginLoc())) {
    if (AC.getCFG()) {
      CalledOnceCheckReporter Reporter(S, IPData->CalledOnceData);
      checkCalledOnceParameters(
          AC, Handler&: Reporter,
          CheckConventionalParameters: shouldAnalyzeCalledOnceConventions(Diags, At: D->getBeginLoc()));
    }
  }

  // Switch fall-through annotation checking runs if either diagnostic is on,
  // or if the body contains a [[fallthrough]] statement to validate.
  bool FallThroughDiagFull =
      !Diags.isIgnored(DiagID: diag::warn_unannotated_fallthrough, Loc: D->getBeginLoc());
  bool FallThroughDiagPerFunction = !Diags.isIgnored(
      DiagID: diag::warn_unannotated_fallthrough_per_function, Loc: D->getBeginLoc());
  if (FallThroughDiagFull || FallThroughDiagPerFunction ||
      fscope->HasFallthroughStmt) {
    DiagnoseSwitchLabelsFallthrough(S, AC, PerFunction: !FallThroughDiagFull);
  }

  if (S.getLangOpts().ObjCWeak &&
      !Diags.isIgnored(DiagID: diag::warn_arc_repeated_use_of_weak, Loc: D->getBeginLoc()))
    diagnoseRepeatedUseOfWeak(S, CurFn: fscope, D, PM: AC.getParentMap());


  // Check for infinite self-recursion in functions
  if (!Diags.isIgnored(DiagID: diag::warn_infinite_recursive_function,
                       Loc: D->getBeginLoc())) {
    if (const FunctionDecl *FD = dyn_cast<FunctionDecl>(Val: D)) {
      checkRecursiveFunction(S, FD, Body, AC);
    }
  }

  // Check for throw out of non-throwing function.
  if (!Diags.isIgnored(DiagID: diag::warn_throw_in_noexcept_func, Loc: D->getBeginLoc()))
    if (const FunctionDecl *FD = dyn_cast<FunctionDecl>(Val: D))
      if (S.getLangOpts().CPlusPlus && !fscope->isCoroutine() && isNoexcept(FD))
        checkThrowInNonThrowingFunc(S, FD, AC);

  // If none of the previous checks caused a CFG build, trigger one here
  // for the logical error handler.
  if (LogicalErrorHandler::hasActiveDiagnostics(Diags, Loc: D->getBeginLoc())) {
    AC.getCFG();
  }

  // Clear any of our policy overrides.
  clearOverrides();

  // Collect statistics about the CFG if it was built.
  if (S.CollectStats && AC.isCFGBuilt()) {
    ++NumFunctionsAnalyzed;
    if (CFG *cfg = AC.getCFG()) {
      // If we successfully built a CFG for this context, record some more
      // detail information about it.
      NumCFGBlocks += cfg->getNumBlockIDs();
      MaxCFGBlocksPerFunction = std::max(a: MaxCFGBlocksPerFunction,
                                         b: cfg->getNumBlockIDs());
    } else {
      ++NumFunctionsWithBadCFGs;
    }
  }
}
3346
3347void clang::sema::AnalysisBasedWarnings::PrintStats() const {
3348 llvm::errs() << "\n*** Analysis Based Warnings Stats:\n";
3349
3350 unsigned NumCFGsBuilt = NumFunctionsAnalyzed - NumFunctionsWithBadCFGs;
3351 unsigned AvgCFGBlocksPerFunction =
3352 !NumCFGsBuilt ? 0 : NumCFGBlocks/NumCFGsBuilt;
3353 llvm::errs() << NumFunctionsAnalyzed << " functions analyzed ("
3354 << NumFunctionsWithBadCFGs << " w/o CFGs).\n"
3355 << " " << NumCFGBlocks << " CFG blocks built.\n"
3356 << " " << AvgCFGBlocksPerFunction
3357 << " average CFG blocks per function.\n"
3358 << " " << MaxCFGBlocksPerFunction
3359 << " max CFG blocks per function.\n";
3360
3361 unsigned AvgUninitVariablesPerFunction = !NumUninitAnalysisFunctions ? 0
3362 : NumUninitAnalysisVariables/NumUninitAnalysisFunctions;
3363 unsigned AvgUninitBlockVisitsPerFunction = !NumUninitAnalysisFunctions ? 0
3364 : NumUninitAnalysisBlockVisits/NumUninitAnalysisFunctions;
3365 llvm::errs() << NumUninitAnalysisFunctions
3366 << " functions analyzed for uninitialiazed variables\n"
3367 << " " << NumUninitAnalysisVariables << " variables analyzed.\n"
3368 << " " << AvgUninitVariablesPerFunction
3369 << " average variables per function.\n"
3370 << " " << MaxUninitAnalysisVariablesPerFunction
3371 << " max variables per function.\n"
3372 << " " << NumUninitAnalysisBlockVisits << " block visits.\n"
3373 << " " << AvgUninitBlockVisitsPerFunction
3374 << " average block visits per function.\n"
3375 << " " << MaxUninitAnalysisBlockVisitsPerFunction
3376 << " max block visits per function.\n";
3377 clang::lifetimes::printStats(Stats: LSStats);
3378}
3379