| 1 | //===--- JumpDiagnostics.cpp - Protected scope jump analysis ------*- C++ -*-=// |
| 2 | // |
| 3 | // Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions. |
| 4 | // See https://llvm.org/LICENSE.txt for license information. |
| 5 | // SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception |
| 6 | // |
| 7 | //===----------------------------------------------------------------------===// |
| 8 | // |
| 9 | // This file implements the JumpScopeChecker class, which is used to diagnose |
| 10 | // jumps that enter a protected scope in an invalid way. |
| 11 | // |
| 12 | //===----------------------------------------------------------------------===// |
| 13 | |
| 14 | #include "clang/AST/DeclCXX.h" |
| 15 | #include "clang/AST/Expr.h" |
| 16 | #include "clang/AST/ExprCXX.h" |
| 17 | #include "clang/AST/StmtCXX.h" |
| 18 | #include "clang/AST/StmtObjC.h" |
| 19 | #include "clang/AST/StmtOpenACC.h" |
| 20 | #include "clang/AST/StmtOpenMP.h" |
| 21 | #include "clang/Basic/SourceLocation.h" |
| 22 | #include "clang/Sema/SemaInternal.h" |
| 23 | #include "llvm/ADT/BitVector.h" |
| 24 | using namespace clang; |
| 25 | |
| 26 | namespace { |
| 27 | |
| 28 | /// JumpScopeChecker - This object is used by Sema to diagnose invalid jumps |
| 29 | /// into VLA and other protected scopes. For example, this rejects: |
| 30 | /// goto L; |
| 31 | /// int a[n]; |
| 32 | /// L: |
| 33 | /// |
| 34 | /// We also detect jumps out of protected scopes when it's not possible to do |
| 35 | /// cleanups properly. Indirect jumps and ASM jumps can't do cleanups because |
| 36 | /// the target is unknown. Return statements with \c [[clang::musttail]] cannot |
| 37 | /// handle any cleanups due to the nature of a tail call. |
class JumpScopeChecker {
  /// The Sema instance used to emit diagnostics.
  Sema &S;

  /// Permissive - True when recovering from errors, in which case precautions
  /// are taken to handle incomplete scope information.
  const bool Permissive;

  /// GotoScope - This is a record that we use to keep track of all of the
  /// scopes that are introduced by VLAs and other things that scope jumps like
  /// gotos. This scope tree has nothing to do with the source scope tree,
  /// because you can have multiple VLA scopes per compound statement, and most
  /// compound statements don't introduce any scopes.
  struct GotoScope {
    /// ParentScope - The index in ScopeMap of the parent scope. This is 0 if
    /// the parent scope is the function body.
    unsigned ParentScope;

    /// InDiag - The note to emit if there is a jump into this scope.
    unsigned InDiag;

    /// OutDiag - The note to emit if there is an indirect jump out
    /// of this scope. Direct jumps always clean up their current scope
    /// in an orderly way.
    unsigned OutDiag;

    /// Loc - Location to emit the diagnostic.
    SourceLocation Loc;

    GotoScope(unsigned parentScope, unsigned InDiag, unsigned OutDiag,
              SourceLocation L)
        : ParentScope(parentScope), InDiag(InDiag), OutDiag(OutDiag), Loc(L) {}
  };

  /// Scopes - The flattened scope tree; entry 0 is the function scope.
  /// Children always appear after their parents, so parent indices are
  /// strictly smaller than child indices.
  SmallVector<GotoScope, 48> Scopes;

  /// LabelAndGotoScopes - Maps each label, goto, switch, and related
  /// statement to the index of the scope it appears in.
  llvm::DenseMap<Stmt*, unsigned> LabelAndGotoScopes;

  /// Jumps - All direct jumps (gotos, switches, asm gotos) found while
  /// building scope information, checked later by VerifyJumps().
  SmallVector<Stmt*, 16> Jumps;

  /// IndirectJumps - Indirect gotos with a non-constant target.
  SmallVector<Stmt*, 4> IndirectJumps;
  /// IndirectJumpTargets - Labels whose address was taken (&&label).
  SmallVector<LabelDecl *, 4> IndirectJumpTargets;
  /// MustTailStmts - Return statements attributed [[clang::musttail]].
  SmallVector<AttributedStmt *, 4> MustTailStmts;

public:
  JumpScopeChecker(Stmt *Body, Sema &S);
private:
  void BuildScopeInformation(Decl *D, unsigned &ParentScope);
  void BuildScopeInformation(VarDecl *D, const BlockDecl *BDecl,
                             unsigned &ParentScope);
  void BuildScopeInformation(CompoundLiteralExpr *CLE, unsigned &ParentScope);
  void BuildScopeInformation(Stmt *S, unsigned &origParentScope);

  void VerifyJumps();
  void VerifyIndirectJumps();
  void VerifyMustTailStmts();
  void NoteJumpIntoScopes(ArrayRef<unsigned> ToScopes);
  void DiagnoseIndirectOrAsmJump(Stmt *IG, unsigned IGScope, LabelDecl *Target,
                                 unsigned TargetScope);
  void CheckJump(Stmt *From, Stmt *To, SourceLocation DiagLoc,
                 unsigned JumpDiag, unsigned JumpDiagWarning,
                 unsigned JumpDiagCompat);
  void CheckGotoStmt(GotoStmt *GS);
  const Attr *GetMustTailAttr(AttributedStmt *AS);

  unsigned GetDeepestCommonScope(unsigned A, unsigned B);
};
| 102 | } // end anonymous namespace |
| 103 | |
| 104 | #define CHECK_PERMISSIVE(x) (assert(Permissive || !(x)), (Permissive && (x))) |
| 105 | |
| 106 | JumpScopeChecker::JumpScopeChecker(Stmt *Body, Sema &s) |
| 107 | : S(s), Permissive(s.hasAnyUnrecoverableErrorsInThisFunction()) { |
| 108 | // Add a scope entry for function scope. |
| 109 | Scopes.push_back(Elt: GotoScope(~0U, ~0U, ~0U, SourceLocation())); |
| 110 | |
| 111 | // Build information for the top level compound statement, so that we have a |
| 112 | // defined scope record for every "goto" and label. |
| 113 | unsigned BodyParentScope = 0; |
| 114 | BuildScopeInformation(S: Body, origParentScope&: BodyParentScope); |
| 115 | |
| 116 | // Check that all jumps we saw are kosher. |
| 117 | VerifyJumps(); |
| 118 | VerifyIndirectJumps(); |
| 119 | VerifyMustTailStmts(); |
| 120 | } |
| 121 | |
| 122 | /// GetDeepestCommonScope - Finds the innermost scope enclosing the |
| 123 | /// two scopes. |
| 124 | unsigned JumpScopeChecker::GetDeepestCommonScope(unsigned A, unsigned B) { |
| 125 | while (A != B) { |
| 126 | // Inner scopes are created after outer scopes and therefore have |
| 127 | // higher indices. |
| 128 | if (A < B) { |
| 129 | assert(Scopes[B].ParentScope < B); |
| 130 | B = Scopes[B].ParentScope; |
| 131 | } else { |
| 132 | assert(Scopes[A].ParentScope < A); |
| 133 | A = Scopes[A].ParentScope; |
| 134 | } |
| 135 | } |
| 136 | return A; |
| 137 | } |
| 138 | |
| 139 | typedef std::pair<unsigned,unsigned> ScopePair; |
| 140 | |
| 141 | /// GetDiagForGotoScopeDecl - If this decl induces a new goto scope, return a |
| 142 | /// diagnostic that should be emitted if control goes over it. If not, return 0. |
| 143 | static ScopePair GetDiagForGotoScopeDecl(Sema &S, const Decl *D) { |
| 144 | if (const VarDecl *VD = dyn_cast<VarDecl>(Val: D)) { |
| 145 | unsigned InDiag = 0; |
| 146 | unsigned OutDiag = 0; |
| 147 | |
| 148 | if (VD->getType()->isVariablyModifiedType()) |
| 149 | InDiag = diag::note_protected_by_vla; |
| 150 | |
| 151 | if (VD->hasAttr<BlocksAttr>()) |
| 152 | return ScopePair(diag::note_protected_by___block, |
| 153 | diag::note_exits___block); |
| 154 | |
| 155 | if (VD->hasAttr<CleanupAttr>()) |
| 156 | return ScopePair(diag::note_protected_by_cleanup, |
| 157 | diag::note_exits_cleanup); |
| 158 | |
| 159 | if (VD->hasLocalStorage()) { |
| 160 | switch (VD->getType().isDestructedType()) { |
| 161 | case QualType::DK_objc_strong_lifetime: |
| 162 | return ScopePair(diag::note_protected_by_objc_strong_init, |
| 163 | diag::note_exits_objc_strong); |
| 164 | |
| 165 | case QualType::DK_objc_weak_lifetime: |
| 166 | return ScopePair(diag::note_protected_by_objc_weak_init, |
| 167 | diag::note_exits_objc_weak); |
| 168 | |
| 169 | case QualType::DK_nontrivial_c_struct: |
| 170 | return ScopePair(diag::note_protected_by_non_trivial_c_struct_init, |
| 171 | diag::note_exits_dtor); |
| 172 | |
| 173 | case QualType::DK_cxx_destructor: |
| 174 | OutDiag = diag::note_exits_dtor; |
| 175 | break; |
| 176 | |
| 177 | case QualType::DK_none: |
| 178 | break; |
| 179 | } |
| 180 | } |
| 181 | |
| 182 | // An earlier diag::note_protected_by_vla is more severe, so don't overwrite |
| 183 | // it here. |
| 184 | if (const Expr *Init = VD->getInit(); |
| 185 | !InDiag && VD->hasLocalStorage() && Init && !Init->containsErrors()) { |
| 186 | // C++11 [stmt.dcl]p3: |
| 187 | // A program that jumps from a point where a variable with automatic |
| 188 | // storage duration is not in scope to a point where it is in scope |
| 189 | // is ill-formed unless the variable has scalar type, class type with |
| 190 | // a trivial default constructor and a trivial destructor, a |
| 191 | // cv-qualified version of one of these types, or an array of one of |
| 192 | // the preceding types and is declared without an initializer. |
| 193 | |
| 194 | // C++03 [stmt.dcl.p3: |
| 195 | // A program that jumps from a point where a local variable |
| 196 | // with automatic storage duration is not in scope to a point |
| 197 | // where it is in scope is ill-formed unless the variable has |
| 198 | // POD type and is declared without an initializer. |
| 199 | |
| 200 | InDiag = diag::note_protected_by_variable_init; |
| 201 | |
| 202 | // For a variable of (array of) class type declared without an |
| 203 | // initializer, we will have call-style initialization and the initializer |
| 204 | // will be the CXXConstructExpr with no intervening nodes. |
| 205 | if (const CXXConstructExpr *CCE = dyn_cast<CXXConstructExpr>(Val: Init)) { |
| 206 | const CXXConstructorDecl *Ctor = CCE->getConstructor(); |
| 207 | if (Ctor->isTrivial() && Ctor->isDefaultConstructor() && |
| 208 | VD->getInitStyle() == VarDecl::CallInit) { |
| 209 | if (OutDiag) |
| 210 | InDiag = diag::note_protected_by_variable_nontriv_destructor; |
| 211 | else if (!Ctor->getParent()->isPOD()) |
| 212 | InDiag = diag::note_protected_by_variable_non_pod; |
| 213 | else |
| 214 | InDiag = 0; |
| 215 | } |
| 216 | } |
| 217 | } |
| 218 | |
| 219 | return ScopePair(InDiag, OutDiag); |
| 220 | } |
| 221 | |
| 222 | if (const TypedefNameDecl *TD = dyn_cast<TypedefNameDecl>(Val: D)) { |
| 223 | if (TD->getUnderlyingType()->isVariablyModifiedType()) |
| 224 | return ScopePair(isa<TypedefDecl>(Val: TD) |
| 225 | ? diag::note_protected_by_vla_typedef |
| 226 | : diag::note_protected_by_vla_type_alias, |
| 227 | 0); |
| 228 | } |
| 229 | |
| 230 | return ScopePair(0U, 0U); |
| 231 | } |
| 232 | |
| 233 | /// Build scope information for a declaration that is part of a DeclStmt. |
| 234 | void JumpScopeChecker::BuildScopeInformation(Decl *D, unsigned &ParentScope) { |
| 235 | // If this decl causes a new scope, push and switch to it. |
| 236 | std::pair<unsigned,unsigned> Diags = GetDiagForGotoScopeDecl(S, D); |
| 237 | if (Diags.first || Diags.second) { |
| 238 | Scopes.push_back(Elt: GotoScope(ParentScope, Diags.first, Diags.second, |
| 239 | D->getLocation())); |
| 240 | ParentScope = Scopes.size()-1; |
| 241 | } |
| 242 | |
| 243 | // If the decl has an initializer, walk it with the potentially new |
| 244 | // scope we just installed. |
| 245 | if (VarDecl *VD = dyn_cast<VarDecl>(Val: D)) |
| 246 | if (Expr *Init = VD->getInit()) |
| 247 | BuildScopeInformation(S: Init, origParentScope&: ParentScope); |
| 248 | } |
| 249 | |
| 250 | /// Build scope information for a captured block literal variables. |
| 251 | void JumpScopeChecker::BuildScopeInformation(VarDecl *D, |
| 252 | const BlockDecl *BDecl, |
| 253 | unsigned &ParentScope) { |
| 254 | // exclude captured __block variables; there's no destructor |
| 255 | // associated with the block literal for them. |
| 256 | if (D->hasAttr<BlocksAttr>()) |
| 257 | return; |
| 258 | QualType T = D->getType(); |
| 259 | QualType::DestructionKind destructKind = T.isDestructedType(); |
| 260 | if (destructKind != QualType::DK_none) { |
| 261 | std::pair<unsigned,unsigned> Diags; |
| 262 | switch (destructKind) { |
| 263 | case QualType::DK_cxx_destructor: |
| 264 | Diags = ScopePair(diag::note_enters_block_captures_cxx_obj, |
| 265 | diag::note_exits_block_captures_cxx_obj); |
| 266 | break; |
| 267 | case QualType::DK_objc_strong_lifetime: |
| 268 | Diags = ScopePair(diag::note_enters_block_captures_strong, |
| 269 | diag::note_exits_block_captures_strong); |
| 270 | break; |
| 271 | case QualType::DK_objc_weak_lifetime: |
| 272 | Diags = ScopePair(diag::note_enters_block_captures_weak, |
| 273 | diag::note_exits_block_captures_weak); |
| 274 | break; |
| 275 | case QualType::DK_nontrivial_c_struct: |
| 276 | Diags = ScopePair(diag::note_enters_block_captures_non_trivial_c_struct, |
| 277 | diag::note_exits_block_captures_non_trivial_c_struct); |
| 278 | break; |
| 279 | case QualType::DK_none: |
| 280 | llvm_unreachable("non-lifetime captured variable" ); |
| 281 | } |
| 282 | SourceLocation Loc = D->getLocation(); |
| 283 | if (Loc.isInvalid()) |
| 284 | Loc = BDecl->getLocation(); |
| 285 | Scopes.push_back(Elt: GotoScope(ParentScope, |
| 286 | Diags.first, Diags.second, Loc)); |
| 287 | ParentScope = Scopes.size()-1; |
| 288 | } |
| 289 | } |
| 290 | |
| 291 | /// Build scope information for compound literals of C struct types that are |
| 292 | /// non-trivial to destruct. |
| 293 | void JumpScopeChecker::BuildScopeInformation(CompoundLiteralExpr *CLE, |
| 294 | unsigned &ParentScope) { |
| 295 | unsigned InDiag = diag::note_enters_compound_literal_scope; |
| 296 | unsigned OutDiag = diag::note_exits_compound_literal_scope; |
| 297 | Scopes.push_back(Elt: GotoScope(ParentScope, InDiag, OutDiag, CLE->getExprLoc())); |
| 298 | ParentScope = Scopes.size() - 1; |
| 299 | } |
| 300 | |
| 301 | /// BuildScopeInformation - The statements from CI to CE are known to form a |
| 302 | /// coherent VLA scope with a specified parent node. Walk through the |
| 303 | /// statements, adding any labels or gotos to LabelAndGotoScopes and recursively |
| 304 | /// walking the AST as needed. |
| 305 | void JumpScopeChecker::BuildScopeInformation(Stmt *S, |
| 306 | unsigned &origParentScope) { |
| 307 | // If this is a statement, rather than an expression, scopes within it don't |
| 308 | // propagate out into the enclosing scope. Otherwise we have to worry |
| 309 | // about block literals, which have the lifetime of their enclosing statement. |
| 310 | unsigned independentParentScope = origParentScope; |
| 311 | unsigned &ParentScope = ((isa<Expr>(Val: S) && !isa<StmtExpr>(Val: S)) |
| 312 | ? origParentScope : independentParentScope); |
| 313 | |
| 314 | unsigned StmtsToSkip = 0u; |
| 315 | |
| 316 | // If we found a label, remember that it is in ParentScope scope. |
| 317 | switch (S->getStmtClass()) { |
| 318 | case Stmt::AddrLabelExprClass: |
| 319 | IndirectJumpTargets.push_back(Elt: cast<AddrLabelExpr>(Val: S)->getLabel()); |
| 320 | break; |
| 321 | |
| 322 | case Stmt::ObjCForCollectionStmtClass: { |
| 323 | auto *CS = cast<ObjCForCollectionStmt>(Val: S); |
| 324 | unsigned Diag = diag::note_protected_by_objc_fast_enumeration; |
| 325 | unsigned NewParentScope = Scopes.size(); |
| 326 | Scopes.push_back(Elt: GotoScope(ParentScope, Diag, 0, S->getBeginLoc())); |
| 327 | BuildScopeInformation(S: CS->getBody(), origParentScope&: NewParentScope); |
| 328 | return; |
| 329 | } |
| 330 | |
| 331 | case Stmt::IndirectGotoStmtClass: |
| 332 | // "goto *&&lbl;" is a special case which we treat as equivalent |
| 333 | // to a normal goto. In addition, we don't calculate scope in the |
| 334 | // operand (to avoid recording the address-of-label use), which |
| 335 | // works only because of the restricted set of expressions which |
| 336 | // we detect as constant targets. |
| 337 | if (cast<IndirectGotoStmt>(Val: S)->getConstantTarget()) |
| 338 | goto RecordJumpScope; |
| 339 | |
| 340 | LabelAndGotoScopes[S] = ParentScope; |
| 341 | IndirectJumps.push_back(Elt: S); |
| 342 | break; |
| 343 | |
| 344 | case Stmt::SwitchStmtClass: |
| 345 | // Evaluate the C++17 init stmt and condition variable |
| 346 | // before entering the scope of the switch statement. |
| 347 | if (Stmt *Init = cast<SwitchStmt>(Val: S)->getInit()) { |
| 348 | BuildScopeInformation(S: Init, origParentScope&: ParentScope); |
| 349 | ++StmtsToSkip; |
| 350 | } |
| 351 | if (VarDecl *Var = cast<SwitchStmt>(Val: S)->getConditionVariable()) { |
| 352 | BuildScopeInformation(D: Var, ParentScope); |
| 353 | ++StmtsToSkip; |
| 354 | } |
| 355 | goto RecordJumpScope; |
| 356 | |
| 357 | case Stmt::GCCAsmStmtClass: |
| 358 | if (!cast<GCCAsmStmt>(Val: S)->isAsmGoto()) |
| 359 | break; |
| 360 | [[fallthrough]]; |
| 361 | |
| 362 | case Stmt::GotoStmtClass: |
| 363 | RecordJumpScope: |
| 364 | // Remember both what scope a goto is in as well as the fact that we have |
| 365 | // it. This makes the second scan not have to walk the AST again. |
| 366 | LabelAndGotoScopes[S] = ParentScope; |
| 367 | Jumps.push_back(Elt: S); |
| 368 | break; |
| 369 | |
| 370 | case Stmt::IfStmtClass: { |
| 371 | IfStmt *IS = cast<IfStmt>(Val: S); |
| 372 | if (!(IS->isConstexpr() || IS->isConsteval() || |
| 373 | IS->isObjCAvailabilityCheck())) |
| 374 | break; |
| 375 | |
| 376 | unsigned Diag = diag::note_protected_by_if_available; |
| 377 | if (IS->isConstexpr()) |
| 378 | Diag = diag::note_protected_by_constexpr_if; |
| 379 | else if (IS->isConsteval()) |
| 380 | Diag = diag::note_protected_by_consteval_if; |
| 381 | |
| 382 | if (VarDecl *Var = IS->getConditionVariable()) |
| 383 | BuildScopeInformation(D: Var, ParentScope); |
| 384 | |
| 385 | // Cannot jump into the middle of the condition. |
| 386 | unsigned NewParentScope = Scopes.size(); |
| 387 | Scopes.push_back(Elt: GotoScope(ParentScope, Diag, 0, IS->getBeginLoc())); |
| 388 | |
| 389 | if (!IS->isConsteval()) |
| 390 | BuildScopeInformation(S: IS->getCond(), origParentScope&: NewParentScope); |
| 391 | |
| 392 | // Jumps into either arm of an 'if constexpr' are not allowed. |
| 393 | NewParentScope = Scopes.size(); |
| 394 | Scopes.push_back(Elt: GotoScope(ParentScope, Diag, 0, IS->getBeginLoc())); |
| 395 | BuildScopeInformation(S: IS->getThen(), origParentScope&: NewParentScope); |
| 396 | if (Stmt *Else = IS->getElse()) { |
| 397 | NewParentScope = Scopes.size(); |
| 398 | Scopes.push_back(Elt: GotoScope(ParentScope, Diag, 0, IS->getBeginLoc())); |
| 399 | BuildScopeInformation(S: Else, origParentScope&: NewParentScope); |
| 400 | } |
| 401 | return; |
| 402 | } |
| 403 | |
| 404 | case Stmt::CXXTryStmtClass: { |
| 405 | CXXTryStmt *TS = cast<CXXTryStmt>(Val: S); |
| 406 | { |
| 407 | unsigned NewParentScope = Scopes.size(); |
| 408 | Scopes.push_back(Elt: GotoScope(ParentScope, |
| 409 | diag::note_protected_by_cxx_try, |
| 410 | diag::note_exits_cxx_try, |
| 411 | TS->getSourceRange().getBegin())); |
| 412 | if (Stmt *TryBlock = TS->getTryBlock()) |
| 413 | BuildScopeInformation(S: TryBlock, origParentScope&: NewParentScope); |
| 414 | } |
| 415 | |
| 416 | // Jump from the catch into the try is not allowed either. |
| 417 | for (unsigned I = 0, E = TS->getNumHandlers(); I != E; ++I) { |
| 418 | CXXCatchStmt *CS = TS->getHandler(i: I); |
| 419 | unsigned NewParentScope = Scopes.size(); |
| 420 | Scopes.push_back(Elt: GotoScope(ParentScope, |
| 421 | diag::note_protected_by_cxx_catch, |
| 422 | diag::note_exits_cxx_catch, |
| 423 | CS->getSourceRange().getBegin())); |
| 424 | BuildScopeInformation(S: CS->getHandlerBlock(), origParentScope&: NewParentScope); |
| 425 | } |
| 426 | return; |
| 427 | } |
| 428 | |
| 429 | case Stmt::SEHTryStmtClass: { |
| 430 | SEHTryStmt *TS = cast<SEHTryStmt>(Val: S); |
| 431 | { |
| 432 | unsigned NewParentScope = Scopes.size(); |
| 433 | Scopes.push_back(Elt: GotoScope(ParentScope, |
| 434 | diag::note_protected_by_seh_try, |
| 435 | diag::note_exits_seh_try, |
| 436 | TS->getSourceRange().getBegin())); |
| 437 | if (Stmt *TryBlock = TS->getTryBlock()) |
| 438 | BuildScopeInformation(S: TryBlock, origParentScope&: NewParentScope); |
| 439 | } |
| 440 | |
| 441 | // Jump from __except or __finally into the __try are not allowed either. |
| 442 | if (SEHExceptStmt *Except = TS->getExceptHandler()) { |
| 443 | unsigned NewParentScope = Scopes.size(); |
| 444 | Scopes.push_back(Elt: GotoScope(ParentScope, |
| 445 | diag::note_protected_by_seh_except, |
| 446 | diag::note_exits_seh_except, |
| 447 | Except->getSourceRange().getBegin())); |
| 448 | BuildScopeInformation(S: Except->getBlock(), origParentScope&: NewParentScope); |
| 449 | } else if (SEHFinallyStmt *Finally = TS->getFinallyHandler()) { |
| 450 | unsigned NewParentScope = Scopes.size(); |
| 451 | Scopes.push_back(Elt: GotoScope(ParentScope, |
| 452 | diag::note_protected_by_seh_finally, |
| 453 | diag::note_exits_seh_finally, |
| 454 | Finally->getSourceRange().getBegin())); |
| 455 | BuildScopeInformation(S: Finally->getBlock(), origParentScope&: NewParentScope); |
| 456 | } |
| 457 | |
| 458 | return; |
| 459 | } |
| 460 | |
| 461 | case Stmt::DeclStmtClass: { |
| 462 | // If this is a declstmt with a VLA definition, it defines a scope from here |
| 463 | // to the end of the containing context. |
| 464 | DeclStmt *DS = cast<DeclStmt>(Val: S); |
| 465 | // The decl statement creates a scope if any of the decls in it are VLAs |
| 466 | // or have the cleanup attribute. |
| 467 | for (auto *I : DS->decls()) |
| 468 | BuildScopeInformation(D: I, ParentScope&: origParentScope); |
| 469 | return; |
| 470 | } |
| 471 | |
| 472 | case Stmt::StmtExprClass: { |
| 473 | // [GNU] |
| 474 | // Jumping into a statement expression with goto or using |
| 475 | // a switch statement outside the statement expression with |
| 476 | // a case or default label inside the statement expression is not permitted. |
| 477 | // Jumping out of a statement expression is permitted. |
| 478 | StmtExpr *SE = cast<StmtExpr>(Val: S); |
| 479 | unsigned NewParentScope = Scopes.size(); |
| 480 | Scopes.push_back(Elt: GotoScope(ParentScope, |
| 481 | diag::note_enters_statement_expression, |
| 482 | /*OutDiag=*/0, SE->getBeginLoc())); |
| 483 | BuildScopeInformation(S: SE->getSubStmt(), origParentScope&: NewParentScope); |
| 484 | return; |
| 485 | } |
| 486 | |
| 487 | case Stmt::ObjCAtTryStmtClass: { |
| 488 | // Disallow jumps into any part of an @try statement by pushing a scope and |
| 489 | // walking all sub-stmts in that scope. |
| 490 | ObjCAtTryStmt *AT = cast<ObjCAtTryStmt>(Val: S); |
| 491 | // Recursively walk the AST for the @try part. |
| 492 | { |
| 493 | unsigned NewParentScope = Scopes.size(); |
| 494 | Scopes.push_back(Elt: GotoScope(ParentScope, |
| 495 | diag::note_protected_by_objc_try, |
| 496 | diag::note_exits_objc_try, |
| 497 | AT->getAtTryLoc())); |
| 498 | if (Stmt *TryPart = AT->getTryBody()) |
| 499 | BuildScopeInformation(S: TryPart, origParentScope&: NewParentScope); |
| 500 | } |
| 501 | |
| 502 | // Jump from the catch to the finally or try is not valid. |
| 503 | for (ObjCAtCatchStmt *AC : AT->catch_stmts()) { |
| 504 | unsigned NewParentScope = Scopes.size(); |
| 505 | Scopes.push_back(Elt: GotoScope(ParentScope, |
| 506 | diag::note_protected_by_objc_catch, |
| 507 | diag::note_exits_objc_catch, |
| 508 | AC->getAtCatchLoc())); |
| 509 | // @catches are nested and it isn't |
| 510 | BuildScopeInformation(S: AC->getCatchBody(), origParentScope&: NewParentScope); |
| 511 | } |
| 512 | |
| 513 | // Jump from the finally to the try or catch is not valid. |
| 514 | if (ObjCAtFinallyStmt *AF = AT->getFinallyStmt()) { |
| 515 | unsigned NewParentScope = Scopes.size(); |
| 516 | Scopes.push_back(Elt: GotoScope(ParentScope, |
| 517 | diag::note_protected_by_objc_finally, |
| 518 | diag::note_exits_objc_finally, |
| 519 | AF->getAtFinallyLoc())); |
| 520 | BuildScopeInformation(S: AF, origParentScope&: NewParentScope); |
| 521 | } |
| 522 | |
| 523 | return; |
| 524 | } |
| 525 | |
| 526 | case Stmt::ObjCAtSynchronizedStmtClass: { |
| 527 | // Disallow jumps into the protected statement of an @synchronized, but |
| 528 | // allow jumps into the object expression it protects. |
| 529 | ObjCAtSynchronizedStmt *AS = cast<ObjCAtSynchronizedStmt>(Val: S); |
| 530 | // Recursively walk the AST for the @synchronized object expr, it is |
| 531 | // evaluated in the normal scope. |
| 532 | BuildScopeInformation(S: AS->getSynchExpr(), origParentScope&: ParentScope); |
| 533 | |
| 534 | // Recursively walk the AST for the @synchronized part, protected by a new |
| 535 | // scope. |
| 536 | unsigned NewParentScope = Scopes.size(); |
| 537 | Scopes.push_back(Elt: GotoScope(ParentScope, |
| 538 | diag::note_protected_by_objc_synchronized, |
| 539 | diag::note_exits_objc_synchronized, |
| 540 | AS->getAtSynchronizedLoc())); |
| 541 | BuildScopeInformation(S: AS->getSynchBody(), origParentScope&: NewParentScope); |
| 542 | return; |
| 543 | } |
| 544 | |
| 545 | case Stmt::ObjCAutoreleasePoolStmtClass: { |
| 546 | // Disallow jumps into the protected statement of an @autoreleasepool. |
| 547 | ObjCAutoreleasePoolStmt *AS = cast<ObjCAutoreleasePoolStmt>(Val: S); |
| 548 | // Recursively walk the AST for the @autoreleasepool part, protected by a |
| 549 | // new scope. |
| 550 | unsigned NewParentScope = Scopes.size(); |
| 551 | Scopes.push_back(Elt: GotoScope(ParentScope, |
| 552 | diag::note_protected_by_objc_autoreleasepool, |
| 553 | diag::note_exits_objc_autoreleasepool, |
| 554 | AS->getAtLoc())); |
| 555 | BuildScopeInformation(S: AS->getSubStmt(), origParentScope&: NewParentScope); |
| 556 | return; |
| 557 | } |
| 558 | |
| 559 | case Stmt::ExprWithCleanupsClass: { |
| 560 | // Disallow jumps past full-expressions that use blocks with |
| 561 | // non-trivial cleanups of their captures. This is theoretically |
| 562 | // implementable but a lot of work which we haven't felt up to doing. |
| 563 | ExprWithCleanups *EWC = cast<ExprWithCleanups>(Val: S); |
| 564 | for (unsigned i = 0, e = EWC->getNumObjects(); i != e; ++i) { |
| 565 | if (auto *BDecl = dyn_cast<BlockDecl *>(Val: EWC->getObject(i))) |
| 566 | for (const auto &CI : BDecl->captures()) { |
| 567 | VarDecl *variable = CI.getVariable(); |
| 568 | BuildScopeInformation(D: variable, BDecl, ParentScope&: origParentScope); |
| 569 | } |
| 570 | else if (auto *CLE = dyn_cast<CompoundLiteralExpr *>(Val: EWC->getObject(i))) |
| 571 | BuildScopeInformation(CLE, ParentScope&: origParentScope); |
| 572 | else |
| 573 | llvm_unreachable("unexpected cleanup object type" ); |
| 574 | } |
| 575 | break; |
| 576 | } |
| 577 | |
| 578 | case Stmt::MaterializeTemporaryExprClass: { |
| 579 | // Disallow jumps out of scopes containing temporaries lifetime-extended to |
| 580 | // automatic storage duration. |
| 581 | MaterializeTemporaryExpr *MTE = cast<MaterializeTemporaryExpr>(Val: S); |
| 582 | if (MTE->getStorageDuration() == SD_Automatic) { |
| 583 | const Expr *ExtendedObject = |
| 584 | MTE->getSubExpr()->skipRValueSubobjectAdjustments(); |
| 585 | if (ExtendedObject->getType().isDestructedType()) { |
| 586 | Scopes.push_back(Elt: GotoScope(ParentScope, 0, |
| 587 | diag::note_exits_temporary_dtor, |
| 588 | ExtendedObject->getExprLoc())); |
| 589 | origParentScope = Scopes.size()-1; |
| 590 | } |
| 591 | } |
| 592 | break; |
| 593 | } |
| 594 | |
| 595 | case Stmt::DeferStmtClass: { |
| 596 | auto *D = cast<DeferStmt>(Val: S); |
| 597 | |
| 598 | { |
| 599 | // Disallow jumps over defer statements. |
| 600 | unsigned NewParentScope = Scopes.size(); |
| 601 | Scopes.emplace_back(Args&: ParentScope, Args: diag::note_protected_by_defer_stmt, Args: 0, |
| 602 | Args: D->getDeferLoc()); |
| 603 | origParentScope = NewParentScope; |
| 604 | } |
| 605 | |
| 606 | // Disallow jumps into or out of defer statements. |
| 607 | { |
| 608 | unsigned NewParentScope = Scopes.size(); |
| 609 | Scopes.emplace_back(Args&: ParentScope, Args: diag::note_enters_defer_stmt, |
| 610 | Args: diag::note_exits_defer_stmt, Args: D->getDeferLoc()); |
| 611 | BuildScopeInformation(S: D->getBody(), origParentScope&: NewParentScope); |
| 612 | } |
| 613 | return; |
| 614 | } |
| 615 | |
| 616 | case Stmt::CaseStmtClass: |
| 617 | case Stmt::DefaultStmtClass: |
| 618 | case Stmt::LabelStmtClass: |
| 619 | LabelAndGotoScopes[S] = ParentScope; |
| 620 | break; |
| 621 | |
| 622 | case Stmt::OpenACCComputeConstructClass: { |
| 623 | unsigned NewParentScope = Scopes.size(); |
| 624 | OpenACCComputeConstruct *CC = cast<OpenACCComputeConstruct>(Val: S); |
| 625 | Scopes.push_back(Elt: GotoScope( |
| 626 | ParentScope, diag::note_acc_branch_into_compute_construct, |
| 627 | diag::note_acc_branch_out_of_compute_construct, CC->getBeginLoc())); |
| 628 | // This can be 'null' if the 'body' is a break that we diagnosed, so no |
| 629 | // reason to put the scope into place. |
| 630 | if (CC->getStructuredBlock()) |
| 631 | BuildScopeInformation(S: CC->getStructuredBlock(), origParentScope&: NewParentScope); |
| 632 | return; |
| 633 | } |
| 634 | |
| 635 | case Stmt::OpenACCCombinedConstructClass: { |
| 636 | unsigned NewParentScope = Scopes.size(); |
| 637 | OpenACCCombinedConstruct *CC = cast<OpenACCCombinedConstruct>(Val: S); |
| 638 | Scopes.push_back(Elt: GotoScope( |
| 639 | ParentScope, diag::note_acc_branch_into_compute_construct, |
| 640 | diag::note_acc_branch_out_of_compute_construct, CC->getBeginLoc())); |
| 641 | // This can be 'null' if the 'body' is a break that we diagnosed, so no |
| 642 | // reason to put the scope into place. |
| 643 | if (CC->getLoop()) |
| 644 | BuildScopeInformation(S: CC->getLoop(), origParentScope&: NewParentScope); |
| 645 | return; |
| 646 | } |
| 647 | |
| 648 | default: |
| 649 | if (auto *ED = dyn_cast<OMPExecutableDirective>(Val: S)) { |
| 650 | if (!ED->isStandaloneDirective()) { |
| 651 | unsigned NewParentScope = Scopes.size(); |
| 652 | Scopes.emplace_back(Args&: ParentScope, |
| 653 | Args: diag::note_omp_protected_structured_block, |
| 654 | Args: diag::note_omp_exits_structured_block, |
| 655 | Args: ED->getStructuredBlock()->getBeginLoc()); |
| 656 | BuildScopeInformation(S: ED->getStructuredBlock(), origParentScope&: NewParentScope); |
| 657 | return; |
| 658 | } |
| 659 | } |
| 660 | break; |
| 661 | } |
| 662 | |
| 663 | for (Stmt *SubStmt : S->children()) { |
| 664 | if (!SubStmt) |
| 665 | continue; |
| 666 | if (StmtsToSkip) { |
| 667 | --StmtsToSkip; |
| 668 | continue; |
| 669 | } |
| 670 | |
| 671 | // Cases, labels, attributes, and defaults aren't "scope parents". It's also |
| 672 | // important to handle these iteratively instead of recursively in |
| 673 | // order to avoid blowing out the stack. |
| 674 | while (true) { |
| 675 | Stmt *Next; |
| 676 | if (SwitchCase *SC = dyn_cast<SwitchCase>(Val: SubStmt)) |
| 677 | Next = SC->getSubStmt(); |
| 678 | else if (LabelStmt *LS = dyn_cast<LabelStmt>(Val: SubStmt)) |
| 679 | Next = LS->getSubStmt(); |
| 680 | else if (AttributedStmt *AS = dyn_cast<AttributedStmt>(Val: SubStmt)) { |
| 681 | if (GetMustTailAttr(AS)) { |
| 682 | LabelAndGotoScopes[AS] = ParentScope; |
| 683 | MustTailStmts.push_back(Elt: AS); |
| 684 | } |
| 685 | Next = AS->getSubStmt(); |
| 686 | } else |
| 687 | break; |
| 688 | |
| 689 | LabelAndGotoScopes[SubStmt] = ParentScope; |
| 690 | SubStmt = Next; |
| 691 | } |
| 692 | |
| 693 | // Recursively walk the AST. |
| 694 | BuildScopeInformation(S: SubStmt, origParentScope&: ParentScope); |
| 695 | } |
| 696 | } |
| 697 | |
| 698 | /// VerifyJumps - Verify each element of the Jumps array to see if they are |
| 699 | /// valid, emitting diagnostics if not. |
| 700 | void JumpScopeChecker::VerifyJumps() { |
| 701 | while (!Jumps.empty()) { |
| 702 | Stmt *Jump = Jumps.pop_back_val(); |
| 703 | |
| 704 | // With a goto, |
| 705 | if (GotoStmt *GS = dyn_cast<GotoStmt>(Val: Jump)) { |
| 706 | // The label may not have a statement if it's coming from inline MS ASM. |
| 707 | if (GS->getLabel()->getStmt()) { |
| 708 | CheckJump(From: GS, To: GS->getLabel()->getStmt(), DiagLoc: GS->getGotoLoc(), |
| 709 | JumpDiag: diag::err_goto_into_protected_scope, |
| 710 | JumpDiagWarning: diag::ext_goto_into_protected_scope, |
| 711 | JumpDiagCompat: S.getLangOpts().CPlusPlus |
| 712 | ? diag::warn_cxx98_compat_goto_into_protected_scope |
| 713 | : diag::warn_cpp_compat_goto_into_protected_scope); |
| 714 | } |
| 715 | CheckGotoStmt(GS); |
| 716 | continue; |
| 717 | } |
| 718 | |
| 719 | // If an asm goto jumps to a different scope, things like destructors or |
| 720 | // initializers might not be run which may be suprising to users. Perhaps |
| 721 | // this behavior can be changed in the future, but today Clang will not |
| 722 | // generate such code. Produce a diagnostic instead. See also the |
| 723 | // discussion here: https://gcc.gnu.org/bugzilla/show_bug.cgi?id=110728. |
| 724 | if (auto *G = dyn_cast<GCCAsmStmt>(Val: Jump)) { |
| 725 | for (AddrLabelExpr *L : G->labels()) { |
| 726 | LabelDecl *LD = L->getLabel(); |
| 727 | unsigned JumpScope = LabelAndGotoScopes[G]; |
| 728 | unsigned TargetScope = LabelAndGotoScopes[LD->getStmt()]; |
| 729 | if (JumpScope != TargetScope) |
| 730 | DiagnoseIndirectOrAsmJump(IG: G, IGScope: JumpScope, Target: LD, TargetScope); |
| 731 | } |
| 732 | continue; |
| 733 | } |
| 734 | |
| 735 | // We only get indirect gotos here when they have a constant target. |
| 736 | if (IndirectGotoStmt *IGS = dyn_cast<IndirectGotoStmt>(Val: Jump)) { |
| 737 | LabelDecl *Target = IGS->getConstantTarget(); |
| 738 | CheckJump(From: IGS, To: Target->getStmt(), DiagLoc: IGS->getGotoLoc(), |
| 739 | JumpDiag: diag::err_goto_into_protected_scope, |
| 740 | JumpDiagWarning: diag::ext_goto_into_protected_scope, |
| 741 | JumpDiagCompat: S.getLangOpts().CPlusPlus |
| 742 | ? diag::warn_cxx98_compat_goto_into_protected_scope |
| 743 | : diag::warn_cpp_compat_goto_into_protected_scope); |
| 744 | continue; |
| 745 | } |
| 746 | |
| 747 | SwitchStmt *SS = cast<SwitchStmt>(Val: Jump); |
| 748 | for (SwitchCase *SC = SS->getSwitchCaseList(); SC; |
| 749 | SC = SC->getNextSwitchCase()) { |
| 750 | if (CHECK_PERMISSIVE(!LabelAndGotoScopes.count(SC))) |
| 751 | continue; |
| 752 | SourceLocation Loc; |
| 753 | if (CaseStmt *CS = dyn_cast<CaseStmt>(Val: SC)) |
| 754 | Loc = CS->getBeginLoc(); |
| 755 | else if (DefaultStmt *DS = dyn_cast<DefaultStmt>(Val: SC)) |
| 756 | Loc = DS->getBeginLoc(); |
| 757 | else |
| 758 | Loc = SC->getBeginLoc(); |
| 759 | CheckJump(From: SS, To: SC, DiagLoc: Loc, JumpDiag: diag::err_switch_into_protected_scope, JumpDiagWarning: 0, |
| 760 | JumpDiagCompat: S.getLangOpts().CPlusPlus |
| 761 | ? diag::warn_cxx98_compat_switch_into_protected_scope |
| 762 | : diag::warn_cpp_compat_switch_into_protected_scope); |
| 763 | } |
| 764 | } |
| 765 | } |
| 766 | |
| 767 | /// VerifyIndirectJumps - Verify whether any possible indirect goto jump might |
| 768 | /// cross a protection boundary. Unlike direct jumps, indirect goto jumps |
| 769 | /// count cleanups as protection boundaries: since there's no way to know where |
| 770 | /// the jump is going, we can't implicitly run the right cleanups the way we |
| 771 | /// can with direct jumps. Thus, an indirect/asm jump is "trivial" if it |
| 772 | /// bypasses no initializations and no teardowns. More formally, an |
| 773 | /// indirect/asm jump from A to B is trivial if the path out from A to DCA(A,B) |
| 774 | /// is trivial and the path in from DCA(A,B) to B is trivial, where DCA(A,B) is |
| 775 | /// the deepest common ancestor of A and B. Jump-triviality is transitive but |
| 776 | /// asymmetric. |
| 777 | /// |
| 778 | /// A path in is trivial if none of the entered scopes have an InDiag. |
| 779 | /// A path out is trivial is none of the exited scopes have an OutDiag. |
| 780 | /// |
| 781 | /// Under these definitions, this function checks that the indirect |
| 782 | /// jump between A and B is trivial for every indirect goto statement A |
| 783 | /// and every label B whose address was taken in the function. |
| 784 | void JumpScopeChecker::VerifyIndirectJumps() { |
| 785 | if (IndirectJumps.empty()) |
| 786 | return; |
| 787 | // If there aren't any address-of-label expressions in this function, |
| 788 | // complain about the first indirect goto. |
| 789 | if (IndirectJumpTargets.empty()) { |
| 790 | S.Diag(Loc: IndirectJumps[0]->getBeginLoc(), |
| 791 | DiagID: diag::err_indirect_goto_without_addrlabel); |
| 792 | return; |
| 793 | } |
| 794 | // Collect a single representative of every scope containing an indirect |
| 795 | // goto. For most code bases, this substantially cuts down on the number of |
| 796 | // jump sites we'll have to consider later. |
| 797 | using JumpScope = std::pair<unsigned, Stmt *>; |
| 798 | SmallVector<JumpScope, 32> JumpScopes; |
| 799 | { |
| 800 | llvm::DenseMap<unsigned, Stmt*> JumpScopesMap; |
| 801 | for (Stmt *IG : IndirectJumps) { |
| 802 | if (CHECK_PERMISSIVE(!LabelAndGotoScopes.count(IG))) |
| 803 | continue; |
| 804 | unsigned IGScope = LabelAndGotoScopes[IG]; |
| 805 | JumpScopesMap.try_emplace(Key: IGScope, Args&: IG); |
| 806 | } |
| 807 | JumpScopes.reserve(N: JumpScopesMap.size()); |
| 808 | for (auto &Pair : JumpScopesMap) |
| 809 | JumpScopes.emplace_back(Args&: Pair); |
| 810 | } |
| 811 | |
| 812 | // Collect a single representative of every scope containing a |
| 813 | // label whose address was taken somewhere in the function. |
| 814 | // For most code bases, there will be only one such scope. |
| 815 | llvm::DenseMap<unsigned, LabelDecl*> TargetScopes; |
| 816 | for (LabelDecl *TheLabel : IndirectJumpTargets) { |
| 817 | if (CHECK_PERMISSIVE(!LabelAndGotoScopes.count(TheLabel->getStmt()))) |
| 818 | continue; |
| 819 | unsigned LabelScope = LabelAndGotoScopes[TheLabel->getStmt()]; |
| 820 | TargetScopes.try_emplace(Key: LabelScope, Args&: TheLabel); |
| 821 | } |
| 822 | |
| 823 | // For each target scope, make sure it's trivially reachable from |
| 824 | // every scope containing a jump site. |
| 825 | // |
| 826 | // A path between scopes always consists of exitting zero or more |
| 827 | // scopes, then entering zero or more scopes. We build a set of |
| 828 | // of scopes S from which the target scope can be trivially |
| 829 | // entered, then verify that every jump scope can be trivially |
| 830 | // exitted to reach a scope in S. |
| 831 | llvm::BitVector Reachable(Scopes.size(), false); |
| 832 | for (auto [TargetScope, TargetLabel] : TargetScopes) { |
| 833 | Reachable.reset(); |
| 834 | |
| 835 | // Mark all the enclosing scopes from which you can safely jump |
| 836 | // into the target scope. 'Min' will end up being the index of |
| 837 | // the shallowest such scope. |
| 838 | unsigned Min = TargetScope; |
| 839 | while (true) { |
| 840 | Reachable.set(Min); |
| 841 | |
| 842 | // Don't go beyond the outermost scope. |
| 843 | if (Min == 0) break; |
| 844 | |
| 845 | // Stop if we can't trivially enter the current scope. |
| 846 | if (Scopes[Min].InDiag) break; |
| 847 | |
| 848 | Min = Scopes[Min].ParentScope; |
| 849 | } |
| 850 | |
| 851 | // Walk through all the jump sites, checking that they can trivially |
| 852 | // reach this label scope. |
| 853 | for (auto [JumpScope, JumpStmt] : JumpScopes) { |
| 854 | unsigned Scope = JumpScope; |
| 855 | // Walk out the "scope chain" for this scope, looking for a scope |
| 856 | // we've marked reachable. For well-formed code this amortizes |
| 857 | // to O(JumpScopes.size() / Scopes.size()): we only iterate |
| 858 | // when we see something unmarked, and in well-formed code we |
| 859 | // mark everything we iterate past. |
| 860 | bool IsReachable = false; |
| 861 | while (true) { |
| 862 | if (Reachable.test(Idx: Scope)) { |
| 863 | // If we find something reachable, mark all the scopes we just |
| 864 | // walked through as reachable. |
| 865 | for (unsigned S = JumpScope; S != Scope; S = Scopes[S].ParentScope) |
| 866 | Reachable.set(S); |
| 867 | IsReachable = true; |
| 868 | break; |
| 869 | } |
| 870 | |
| 871 | // Don't walk out if we've reached the top-level scope or we've |
| 872 | // gotten shallower than the shallowest reachable scope. |
| 873 | if (Scope == 0 || Scope < Min) break; |
| 874 | |
| 875 | // Don't walk out through an out-diagnostic. |
| 876 | if (Scopes[Scope].OutDiag) break; |
| 877 | |
| 878 | Scope = Scopes[Scope].ParentScope; |
| 879 | } |
| 880 | |
| 881 | // Only diagnose if we didn't find something. |
| 882 | if (IsReachable) continue; |
| 883 | |
| 884 | DiagnoseIndirectOrAsmJump(IG: JumpStmt, IGScope: JumpScope, Target: TargetLabel, TargetScope); |
| 885 | } |
| 886 | } |
| 887 | } |
| 888 | |
| 889 | /// Return true if a particular error+note combination must be downgraded to a |
| 890 | /// warning in Microsoft mode. |
| 891 | static bool IsMicrosoftJumpWarning(unsigned JumpDiag, unsigned InDiagNote) { |
| 892 | return (JumpDiag == diag::err_goto_into_protected_scope && |
| 893 | (InDiagNote == diag::note_protected_by_variable_init || |
| 894 | InDiagNote == diag::note_protected_by_variable_nontriv_destructor)); |
| 895 | } |
| 896 | |
| 897 | /// Return true if a particular note should be downgraded to a compatibility |
| 898 | /// warning in C++11 mode. |
| 899 | static bool IsCXX98CompatWarning(Sema &S, unsigned InDiagNote) { |
| 900 | return S.getLangOpts().CPlusPlus11 && |
| 901 | InDiagNote == diag::note_protected_by_variable_non_pod; |
| 902 | } |
| 903 | |
| 904 | /// Returns true if a particular note should be a C++ compatibility warning in |
| 905 | /// C mode with -Wc++-compat. |
| 906 | static bool IsCppCompatWarning(Sema &S, unsigned InDiagNote) { |
| 907 | return !S.getLangOpts().CPlusPlus && |
| 908 | InDiagNote == diag::note_protected_by_variable_init; |
| 909 | } |
| 910 | |
| 911 | /// Produce primary diagnostic for an indirect jump statement. |
| 912 | static void DiagnoseIndirectOrAsmJumpStmt(Sema &S, Stmt *Jump, |
| 913 | LabelDecl *Target, bool &Diagnosed) { |
| 914 | if (Diagnosed) |
| 915 | return; |
| 916 | bool IsAsmGoto = isa<GCCAsmStmt>(Val: Jump); |
| 917 | S.Diag(Loc: Jump->getBeginLoc(), DiagID: diag::err_indirect_goto_in_protected_scope) |
| 918 | << IsAsmGoto; |
| 919 | S.Diag(Loc: Target->getStmt()->getIdentLoc(), DiagID: diag::note_indirect_goto_target) |
| 920 | << IsAsmGoto; |
| 921 | Diagnosed = true; |
| 922 | } |
| 923 | |
| 924 | /// Produce note diagnostics for a jump into a protected scope. |
| 925 | void JumpScopeChecker::NoteJumpIntoScopes(ArrayRef<unsigned> ToScopes) { |
| 926 | if (CHECK_PERMISSIVE(ToScopes.empty())) |
| 927 | return; |
| 928 | for (unsigned I = 0, E = ToScopes.size(); I != E; ++I) |
| 929 | if (Scopes[ToScopes[I]].InDiag) |
| 930 | S.Diag(Loc: Scopes[ToScopes[I]].Loc, DiagID: Scopes[ToScopes[I]].InDiag); |
| 931 | } |
| 932 | |
| 933 | /// Diagnose an indirect jump which is known to cross scopes. |
| 934 | void JumpScopeChecker::DiagnoseIndirectOrAsmJump(Stmt *Jump, unsigned JumpScope, |
| 935 | LabelDecl *Target, |
| 936 | unsigned TargetScope) { |
| 937 | if (CHECK_PERMISSIVE(JumpScope == TargetScope)) |
| 938 | return; |
| 939 | |
| 940 | unsigned Common = GetDeepestCommonScope(A: JumpScope, B: TargetScope); |
| 941 | bool Diagnosed = false; |
| 942 | |
| 943 | // Walk out the scope chain until we reach the common ancestor. |
| 944 | for (unsigned I = JumpScope; I != Common; I = Scopes[I].ParentScope) |
| 945 | if (Scopes[I].OutDiag) { |
| 946 | DiagnoseIndirectOrAsmJumpStmt(S, Jump, Target, Diagnosed); |
| 947 | S.Diag(Loc: Scopes[I].Loc, DiagID: Scopes[I].OutDiag); |
| 948 | } |
| 949 | |
| 950 | SmallVector<unsigned, 10> ToScopesCXX98Compat, ToScopesCppCompat; |
| 951 | |
| 952 | // Now walk into the scopes containing the label whose address was taken. |
| 953 | for (unsigned I = TargetScope; I != Common; I = Scopes[I].ParentScope) |
| 954 | if (IsCXX98CompatWarning(S, InDiagNote: Scopes[I].InDiag)) |
| 955 | ToScopesCXX98Compat.push_back(Elt: I); |
| 956 | else if (IsCppCompatWarning(S, InDiagNote: Scopes[I].InDiag)) |
| 957 | ToScopesCppCompat.push_back(Elt: I); |
| 958 | else if (Scopes[I].InDiag) { |
| 959 | DiagnoseIndirectOrAsmJumpStmt(S, Jump, Target, Diagnosed); |
| 960 | S.Diag(Loc: Scopes[I].Loc, DiagID: Scopes[I].InDiag); |
| 961 | } |
| 962 | |
| 963 | // Diagnose this jump if it would be ill-formed in C++[98]. |
| 964 | if (!Diagnosed) { |
| 965 | bool IsAsmGoto = isa<GCCAsmStmt>(Val: Jump); |
| 966 | auto Diag = [&](unsigned DiagId, const SmallVectorImpl<unsigned> &Notes) { |
| 967 | S.Diag(Loc: Jump->getBeginLoc(), DiagID: DiagId) << IsAsmGoto; |
| 968 | S.Diag(Loc: Target->getStmt()->getIdentLoc(), DiagID: diag::note_indirect_goto_target) |
| 969 | << IsAsmGoto; |
| 970 | NoteJumpIntoScopes(ToScopes: Notes); |
| 971 | }; |
| 972 | if (!ToScopesCXX98Compat.empty()) |
| 973 | Diag(diag::warn_cxx98_compat_indirect_goto_in_protected_scope, |
| 974 | ToScopesCXX98Compat); |
| 975 | else if (!ToScopesCppCompat.empty()) |
| 976 | Diag(diag::warn_cpp_compat_indirect_goto_in_protected_scope, |
| 977 | ToScopesCppCompat); |
| 978 | } |
| 979 | } |
| 980 | |
| 981 | /// CheckJump - Validate that the specified jump statement is valid: that it is |
| 982 | /// jumping within or out of its current scope, not into a deeper one. |
| 983 | void JumpScopeChecker::CheckJump(Stmt *From, Stmt *To, SourceLocation DiagLoc, |
| 984 | unsigned JumpDiagError, |
| 985 | unsigned JumpDiagWarning, |
| 986 | unsigned JumpDiagCompat) { |
| 987 | if (CHECK_PERMISSIVE(!LabelAndGotoScopes.count(From))) |
| 988 | return; |
| 989 | if (CHECK_PERMISSIVE(!LabelAndGotoScopes.count(To))) |
| 990 | return; |
| 991 | |
| 992 | unsigned FromScope = LabelAndGotoScopes[From]; |
| 993 | unsigned ToScope = LabelAndGotoScopes[To]; |
| 994 | |
| 995 | // Common case: exactly the same scope, which is fine. |
| 996 | if (FromScope == ToScope) return; |
| 997 | |
| 998 | // Warn on gotos out of __finally blocks and defer statements. |
| 999 | if (isa<GotoStmt>(Val: From) || isa<IndirectGotoStmt>(Val: From)) { |
| 1000 | // If FromScope > ToScope, FromScope is more nested and the jump goes to a |
| 1001 | // less nested scope. Check if it crosses a __finally along the way. |
| 1002 | for (unsigned I = FromScope; I > ToScope; I = Scopes[I].ParentScope) { |
| 1003 | if (Scopes[I].InDiag == diag::note_protected_by_seh_finally) { |
| 1004 | S.Diag(Loc: From->getBeginLoc(), DiagID: diag::warn_jump_out_of_seh_finally); |
| 1005 | break; |
| 1006 | } else if (Scopes[I].InDiag == |
| 1007 | diag::note_omp_protected_structured_block) { |
| 1008 | S.Diag(Loc: From->getBeginLoc(), DiagID: diag::err_goto_into_protected_scope); |
| 1009 | S.Diag(Loc: To->getBeginLoc(), DiagID: diag::note_omp_exits_structured_block); |
| 1010 | break; |
| 1011 | } else if (Scopes[I].InDiag == |
| 1012 | diag::note_acc_branch_into_compute_construct) { |
| 1013 | S.Diag(Loc: From->getBeginLoc(), DiagID: diag::err_goto_into_protected_scope); |
| 1014 | S.Diag(Loc: Scopes[I].Loc, DiagID: diag::note_acc_branch_out_of_compute_construct); |
| 1015 | return; |
| 1016 | } else if (Scopes[I].OutDiag == diag::note_exits_defer_stmt) { |
| 1017 | S.Diag(Loc: From->getBeginLoc(), DiagID: diag::err_goto_into_protected_scope); |
| 1018 | S.Diag(Loc: Scopes[I].Loc, DiagID: diag::note_exits_defer_stmt); |
| 1019 | return; |
| 1020 | } |
| 1021 | } |
| 1022 | } |
| 1023 | |
| 1024 | unsigned CommonScope = GetDeepestCommonScope(A: FromScope, B: ToScope); |
| 1025 | |
| 1026 | // It's okay to jump out from a nested scope. |
| 1027 | if (CommonScope == ToScope) return; |
| 1028 | |
| 1029 | // Pull out (and reverse) any scopes we might need to diagnose skipping. |
| 1030 | SmallVector<unsigned, 10> ToScopesCompat; |
| 1031 | SmallVector<unsigned, 10> ToScopesError; |
| 1032 | SmallVector<unsigned, 10> ToScopesWarning; |
| 1033 | for (unsigned I = ToScope; I != CommonScope; I = Scopes[I].ParentScope) { |
| 1034 | if (S.getLangOpts().MSVCCompat && S.getLangOpts().CPlusPlus && |
| 1035 | JumpDiagWarning != 0 && |
| 1036 | IsMicrosoftJumpWarning(JumpDiag: JumpDiagError, InDiagNote: Scopes[I].InDiag)) |
| 1037 | ToScopesWarning.push_back(Elt: I); |
| 1038 | else if (IsCXX98CompatWarning(S, InDiagNote: Scopes[I].InDiag) || |
| 1039 | IsCppCompatWarning(S, InDiagNote: Scopes[I].InDiag)) |
| 1040 | ToScopesCompat.push_back(Elt: I); |
| 1041 | else if (Scopes[I].InDiag) |
| 1042 | ToScopesError.push_back(Elt: I); |
| 1043 | } |
| 1044 | |
| 1045 | // Handle warnings. |
| 1046 | if (!ToScopesWarning.empty()) { |
| 1047 | S.Diag(Loc: DiagLoc, DiagID: JumpDiagWarning); |
| 1048 | NoteJumpIntoScopes(ToScopes: ToScopesWarning); |
| 1049 | assert(isa<LabelStmt>(To)); |
| 1050 | LabelStmt *Label = cast<LabelStmt>(Val: To); |
| 1051 | Label->setSideEntry(true); |
| 1052 | } |
| 1053 | |
| 1054 | // Handle errors. |
| 1055 | if (!ToScopesError.empty()) { |
| 1056 | S.Diag(Loc: DiagLoc, DiagID: JumpDiagError); |
| 1057 | NoteJumpIntoScopes(ToScopes: ToScopesError); |
| 1058 | } |
| 1059 | |
| 1060 | // Handle -Wc++98-compat or -Wc++-compat warnings if the jump is well-formed. |
| 1061 | if (ToScopesError.empty() && !ToScopesCompat.empty()) { |
| 1062 | S.Diag(Loc: DiagLoc, DiagID: JumpDiagCompat); |
| 1063 | NoteJumpIntoScopes(ToScopes: ToScopesCompat); |
| 1064 | } |
| 1065 | } |
| 1066 | |
| 1067 | void JumpScopeChecker::CheckGotoStmt(GotoStmt *GS) { |
| 1068 | if (GS->getLabel()->isMSAsmLabel()) { |
| 1069 | S.Diag(Loc: GS->getGotoLoc(), DiagID: diag::err_goto_ms_asm_label) |
| 1070 | << GS->getLabel()->getIdentifier(); |
| 1071 | S.Diag(Loc: GS->getLabel()->getLocation(), DiagID: diag::note_goto_ms_asm_label) |
| 1072 | << GS->getLabel()->getIdentifier(); |
| 1073 | } |
| 1074 | } |
| 1075 | |
| 1076 | void JumpScopeChecker::VerifyMustTailStmts() { |
| 1077 | for (AttributedStmt *AS : MustTailStmts) { |
| 1078 | for (unsigned I = LabelAndGotoScopes[AS]; I; I = Scopes[I].ParentScope) { |
| 1079 | if (Scopes[I].OutDiag) { |
| 1080 | S.Diag(Loc: AS->getBeginLoc(), DiagID: diag::err_musttail_scope); |
| 1081 | S.Diag(Loc: Scopes[I].Loc, DiagID: Scopes[I].OutDiag); |
| 1082 | } |
| 1083 | } |
| 1084 | } |
| 1085 | } |
| 1086 | |
| 1087 | const Attr *JumpScopeChecker::GetMustTailAttr(AttributedStmt *AS) { |
| 1088 | ArrayRef<const Attr *> Attrs = AS->getAttrs(); |
| 1089 | const auto *Iter = |
| 1090 | llvm::find_if(Range&: Attrs, P: [](const Attr *A) { return isa<MustTailAttr>(Val: A); }); |
| 1091 | return Iter != Attrs.end() ? *Iter : nullptr; |
| 1092 | } |
| 1093 | |
| 1094 | void Sema::DiagnoseInvalidJumps(Stmt *Body) { |
| 1095 | (void)JumpScopeChecker(Body, *this); |
| 1096 | } |
| 1097 | |