//===--- JumpDiagnostics.cpp - Protected scope jump analysis ------*- C++ -*-=//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This file implements the JumpScopeChecker class, which is used to diagnose
// jumps that enter a protected scope in an invalid way.
//
//===----------------------------------------------------------------------===//

#include "clang/AST/DeclCXX.h"
#include "clang/AST/Expr.h"
#include "clang/AST/ExprCXX.h"
#include "clang/AST/StmtCXX.h"
#include "clang/AST/StmtObjC.h"
#include "clang/AST/StmtOpenACC.h"
#include "clang/AST/StmtOpenMP.h"
#include "clang/Basic/SourceLocation.h"
#include "clang/Sema/SemaInternal.h"
#include "llvm/ADT/BitVector.h"
using namespace clang;

namespace {

/// JumpScopeChecker - This object is used by Sema to diagnose invalid jumps
/// into VLA and other protected scopes. For example, this rejects:
///    goto L;
///    int a[n];
///   L:
///
/// We also detect jumps out of protected scopes when it's not possible to do
/// cleanups properly. Indirect jumps and ASM jumps can't do cleanups because
/// the target is unknown. Return statements with \c [[clang::musttail]] cannot
/// handle any cleanups due to the nature of a tail call.
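///
/// For example, an indirect goto cannot exit a scope whose teardown requires
/// a cleanup (a sketch; '__block' gives the variable a cleanup on scope exit):
///   {
///     __block int x;
///     void *p = &&out;
///     goto *p;  // rejected: the target is unknown, so x's cleanup can't run
///   }
///  out: ;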
class JumpScopeChecker {
  Sema &S;

  /// Permissive - True when recovering from errors, in which case precautions
  /// are taken to handle incomplete scope information.
  const bool Permissive;

  /// GotoScope - This is a record that we use to keep track of all of the
  /// scopes that are introduced by VLAs and other things that scope jumps like
  /// gotos. This scope tree has nothing to do with the source scope tree,
  /// because you can have multiple VLA scopes per compound statement, and most
  /// compound statements don't introduce any scopes.
  struct GotoScope {
    /// ParentScope - The index in ScopeMap of the parent scope. This is 0 if
    /// the parent scope is the function body.
    unsigned ParentScope;

    /// InDiag - The note to emit if there is a jump into this scope.
    unsigned InDiag;

    /// OutDiag - The note to emit if there is an indirect jump out
    /// of this scope. Direct jumps always clean up their current scope
    /// in an orderly way.
    unsigned OutDiag;

    /// Loc - Location to emit the diagnostic.
    SourceLocation Loc;

    GotoScope(unsigned parentScope, unsigned InDiag, unsigned OutDiag,
              SourceLocation L)
        : ParentScope(parentScope), InDiag(InDiag), OutDiag(OutDiag), Loc(L) {}
  };

  SmallVector<GotoScope, 48> Scopes;
  llvm::DenseMap<Stmt*, unsigned> LabelAndGotoScopes;
  SmallVector<Stmt*, 16> Jumps;

  SmallVector<Stmt*, 4> IndirectJumps;
  SmallVector<LabelDecl *, 4> IndirectJumpTargets;
  SmallVector<AttributedStmt *, 4> MustTailStmts;

public:
  JumpScopeChecker(Stmt *Body, Sema &S);

private:
  void BuildScopeInformation(Decl *D, unsigned &ParentScope);
  void BuildScopeInformation(VarDecl *D, const BlockDecl *BDecl,
                             unsigned &ParentScope);
  void BuildScopeInformation(CompoundLiteralExpr *CLE, unsigned &ParentScope);
  void BuildScopeInformation(Stmt *S, unsigned &origParentScope);

  void VerifyJumps();
  void VerifyIndirectJumps();
  void VerifyMustTailStmts();
  void NoteJumpIntoScopes(ArrayRef<unsigned> ToScopes);
  void DiagnoseIndirectOrAsmJump(Stmt *IG, unsigned IGScope, LabelDecl *Target,
                                 unsigned TargetScope);
  void CheckJump(Stmt *From, Stmt *To, SourceLocation DiagLoc,
                 unsigned JumpDiag, unsigned JumpDiagWarning,
                 unsigned JumpDiagCXX98Compat);
  void CheckGotoStmt(GotoStmt *GS);
  const Attr *GetMustTailAttr(AttributedStmt *AS);

  unsigned GetDeepestCommonScope(unsigned A, unsigned B);
};
} // end anonymous namespace

#define CHECK_PERMISSIVE(x) (assert(Permissive || !(x)), (Permissive && (x)))

JumpScopeChecker::JumpScopeChecker(Stmt *Body, Sema &s)
    : S(s), Permissive(s.hasAnyUnrecoverableErrorsInThisFunction()) {
  // Add a scope entry for function scope.
  Scopes.push_back(GotoScope(~0U, ~0U, ~0U, SourceLocation()));

  // Build information for the top level compound statement, so that we have a
  // defined scope record for every "goto" and label.
  unsigned BodyParentScope = 0;
  BuildScopeInformation(Body, BodyParentScope);

  // Check that all jumps we saw are kosher.
  VerifyJumps();
  VerifyIndirectJumps();
  VerifyMustTailStmts();
}

/// GetDeepestCommonScope - Finds the innermost scope enclosing the
/// two scopes.
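/// Parents always have lower indices than their children, so repeatedly
/// walking the higher index up to its parent converges; e.g. with the scope
/// chains 0 <- 1 <- 2 and 0 <- 3 (a sketch of the index layout),
/// GetDeepestCommonScope(2, 3) returns 0.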
unsigned JumpScopeChecker::GetDeepestCommonScope(unsigned A, unsigned B) {
  while (A != B) {
    // Inner scopes are created after outer scopes and therefore have
    // higher indices.
    if (A < B) {
      assert(Scopes[B].ParentScope < B);
      B = Scopes[B].ParentScope;
    } else {
      assert(Scopes[A].ParentScope < A);
      A = Scopes[A].ParentScope;
    }
  }
  return A;
}

typedef std::pair<unsigned,unsigned> ScopePair;

/// GetDiagForGotoScopeDecl - If this decl induces a new goto scope, return the
/// diagnostics that should be emitted if control goes over it. If not, return
/// a pair of zeros.
static ScopePair GetDiagForGotoScopeDecl(Sema &S, const Decl *D) {
  if (const VarDecl *VD = dyn_cast<VarDecl>(D)) {
    unsigned InDiag = 0;
    unsigned OutDiag = 0;

    if (VD->getType()->isVariablyModifiedType())
      InDiag = diag::note_protected_by_vla;

    if (VD->hasAttr<BlocksAttr>())
      return ScopePair(diag::note_protected_by___block,
                       diag::note_exits___block);

    if (VD->hasAttr<CleanupAttr>())
      return ScopePair(diag::note_protected_by_cleanup,
                       diag::note_exits_cleanup);

    if (VD->hasLocalStorage()) {
      switch (VD->getType().isDestructedType()) {
      case QualType::DK_objc_strong_lifetime:
        return ScopePair(diag::note_protected_by_objc_strong_init,
                         diag::note_exits_objc_strong);

      case QualType::DK_objc_weak_lifetime:
        return ScopePair(diag::note_protected_by_objc_weak_init,
                         diag::note_exits_objc_weak);

      case QualType::DK_nontrivial_c_struct:
        return ScopePair(diag::note_protected_by_non_trivial_c_struct_init,
                         diag::note_exits_dtor);

      case QualType::DK_cxx_destructor:
        OutDiag = diag::note_exits_dtor;
        break;

      case QualType::DK_none:
        break;
      }
    }

    const Expr *Init = VD->getInit();
    if (S.Context.getLangOpts().CPlusPlus && VD->hasLocalStorage() && Init &&
        !Init->containsErrors()) {
      // C++11 [stmt.dcl]p3:
      //   A program that jumps from a point where a variable with automatic
      //   storage duration is not in scope to a point where it is in scope
      //   is ill-formed unless the variable has scalar type, class type with
      //   a trivial default constructor and a trivial destructor, a
      //   cv-qualified version of one of these types, or an array of one of
      //   the preceding types and is declared without an initializer.

      // C++03 [stmt.dcl]p3:
      //   A program that jumps from a point where a local variable
      //   with automatic storage duration is not in scope to a point
      //   where it is in scope is ill-formed unless the variable has
      //   POD type and is declared without an initializer.
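
      // For instance (a sketch, assuming a non-trivially-constructed local
      // like std::string), both dialects reject jumping over this init:
      //   goto done;
      //   std::string s("skipped");  // non-trivial initialization bypassed
      //  done: ;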

      InDiag = diag::note_protected_by_variable_init;

      // For a variable of (array of) class type declared without an
      // initializer, we will have call-style initialization and the
      // initializer will be the CXXConstructExpr with no intervening nodes.
      if (const CXXConstructExpr *CCE = dyn_cast<CXXConstructExpr>(Init)) {
        const CXXConstructorDecl *Ctor = CCE->getConstructor();
        if (Ctor->isTrivial() && Ctor->isDefaultConstructor() &&
            VD->getInitStyle() == VarDecl::CallInit) {
          if (OutDiag)
            InDiag = diag::note_protected_by_variable_nontriv_destructor;
          else if (!Ctor->getParent()->isPOD())
            InDiag = diag::note_protected_by_variable_non_pod;
          else
            InDiag = 0;
        }
      }
    }

    return ScopePair(InDiag, OutDiag);
  }

  if (const TypedefNameDecl *TD = dyn_cast<TypedefNameDecl>(D)) {
    if (TD->getUnderlyingType()->isVariablyModifiedType())
      return ScopePair(isa<TypedefDecl>(TD)
                           ? diag::note_protected_by_vla_typedef
                           : diag::note_protected_by_vla_type_alias,
                       0);
  }

  return ScopePair(0U, 0U);
}

/// Build scope information for a declaration that is part of a DeclStmt.
void JumpScopeChecker::BuildScopeInformation(Decl *D, unsigned &ParentScope) {
  // If this decl causes a new scope, push and switch to it.
  std::pair<unsigned,unsigned> Diags = GetDiagForGotoScopeDecl(S, D);
  if (Diags.first || Diags.second) {
    Scopes.push_back(GotoScope(ParentScope, Diags.first, Diags.second,
                               D->getLocation()));
    ParentScope = Scopes.size()-1;
  }

  // If the decl has an initializer, walk it with the potentially new
  // scope we just installed.
  if (VarDecl *VD = dyn_cast<VarDecl>(D))
    if (Expr *Init = VD->getInit())
      BuildScopeInformation(Init, ParentScope);
}

/// Build scope information for captured block literal variables.
void JumpScopeChecker::BuildScopeInformation(VarDecl *D,
                                             const BlockDecl *BDecl,
                                             unsigned &ParentScope) {
  // Exclude captured __block variables; there's no destructor
  // associated with the block literal for them.
  if (D->hasAttr<BlocksAttr>())
    return;
  QualType T = D->getType();
  QualType::DestructionKind destructKind = T.isDestructedType();
  if (destructKind != QualType::DK_none) {
    std::pair<unsigned,unsigned> Diags;
    switch (destructKind) {
    case QualType::DK_cxx_destructor:
      Diags = ScopePair(diag::note_enters_block_captures_cxx_obj,
                        diag::note_exits_block_captures_cxx_obj);
      break;
    case QualType::DK_objc_strong_lifetime:
      Diags = ScopePair(diag::note_enters_block_captures_strong,
                        diag::note_exits_block_captures_strong);
      break;
    case QualType::DK_objc_weak_lifetime:
      Diags = ScopePair(diag::note_enters_block_captures_weak,
                        diag::note_exits_block_captures_weak);
      break;
    case QualType::DK_nontrivial_c_struct:
      Diags = ScopePair(diag::note_enters_block_captures_non_trivial_c_struct,
                        diag::note_exits_block_captures_non_trivial_c_struct);
      break;
    case QualType::DK_none:
      llvm_unreachable("non-lifetime captured variable");
    }
    SourceLocation Loc = D->getLocation();
    if (Loc.isInvalid())
      Loc = BDecl->getLocation();
    Scopes.push_back(GotoScope(ParentScope, Diags.first, Diags.second, Loc));
    ParentScope = Scopes.size()-1;
  }
}

/// Build scope information for compound literals of C struct types that are
/// non-trivial to destruct.
void JumpScopeChecker::BuildScopeInformation(CompoundLiteralExpr *CLE,
                                             unsigned &ParentScope) {
  unsigned InDiag = diag::note_enters_compound_literal_scope;
  unsigned OutDiag = diag::note_exits_compound_literal_scope;
  Scopes.push_back(GotoScope(ParentScope, InDiag, OutDiag, CLE->getExprLoc()));
  ParentScope = Scopes.size() - 1;
}

/// BuildScopeInformation - Walk the given statement, which is known to form a
/// coherent VLA scope with a specified parent node, adding any labels or
/// gotos to LabelAndGotoScopes and recursively walking the AST as needed.
void JumpScopeChecker::BuildScopeInformation(Stmt *S,
                                             unsigned &origParentScope) {
  // If this is a statement, rather than an expression, scopes within it don't
  // propagate out into the enclosing scope. Otherwise we have to worry
  // about block literals, which have the lifetime of their enclosing statement.
  unsigned independentParentScope = origParentScope;
  unsigned &ParentScope = ((isa<Expr>(S) && !isa<StmtExpr>(S))
                           ? origParentScope : independentParentScope);

  unsigned StmtsToSkip = 0u;

  // If we found a label, remember that it is in ParentScope scope.
  switch (S->getStmtClass()) {
  case Stmt::AddrLabelExprClass:
    IndirectJumpTargets.push_back(cast<AddrLabelExpr>(S)->getLabel());
    break;

  case Stmt::ObjCForCollectionStmtClass: {
    auto *CS = cast<ObjCForCollectionStmt>(S);
    unsigned Diag = diag::note_protected_by_objc_fast_enumeration;
    unsigned NewParentScope = Scopes.size();
    Scopes.push_back(GotoScope(ParentScope, Diag, 0, S->getBeginLoc()));
    BuildScopeInformation(CS->getBody(), NewParentScope);
    return;
  }

  case Stmt::IndirectGotoStmtClass:
    // "goto *&&lbl;" is a special case which we treat as equivalent
    // to a normal goto. In addition, we don't calculate scope in the
    // operand (to avoid recording the address-of-label use), which
    // works only because of the restricted set of expressions which
    // we detect as constant targets.
    if (cast<IndirectGotoStmt>(S)->getConstantTarget())
      goto RecordJumpScope;

    LabelAndGotoScopes[S] = ParentScope;
    IndirectJumps.push_back(S);
    break;

  case Stmt::SwitchStmtClass:
    // Evaluate the C++17 init stmt and condition variable
    // before entering the scope of the switch statement.
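    // For example (a sketch): in "switch (int i = f(); i) { ... }", the init
    // statement and condition variable are walked here in the enclosing
    // scope, then skipped via StmtsToSkip when the children are visited.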
    if (Stmt *Init = cast<SwitchStmt>(S)->getInit()) {
      BuildScopeInformation(Init, ParentScope);
      ++StmtsToSkip;
    }
    if (VarDecl *Var = cast<SwitchStmt>(S)->getConditionVariable()) {
      BuildScopeInformation(Var, ParentScope);
      ++StmtsToSkip;
    }
    goto RecordJumpScope;

  case Stmt::GCCAsmStmtClass:
    if (!cast<GCCAsmStmt>(S)->isAsmGoto())
      break;
    [[fallthrough]];

  case Stmt::GotoStmtClass:
  RecordJumpScope:
    // Remember both what scope a goto is in as well as the fact that we have
    // it. This makes the second scan not have to walk the AST again.
    LabelAndGotoScopes[S] = ParentScope;
    Jumps.push_back(S);
    break;

  case Stmt::IfStmtClass: {
    IfStmt *IS = cast<IfStmt>(S);
    if (!(IS->isConstexpr() || IS->isConsteval() ||
          IS->isObjCAvailabilityCheck()))
      break;

    unsigned Diag = diag::note_protected_by_if_available;
    if (IS->isConstexpr())
      Diag = diag::note_protected_by_constexpr_if;
    else if (IS->isConsteval())
      Diag = diag::note_protected_by_consteval_if;

    if (VarDecl *Var = IS->getConditionVariable())
      BuildScopeInformation(Var, ParentScope);

    // Cannot jump into the middle of the condition.
    unsigned NewParentScope = Scopes.size();
    Scopes.push_back(GotoScope(ParentScope, Diag, 0, IS->getBeginLoc()));

    if (!IS->isConsteval())
      BuildScopeInformation(IS->getCond(), NewParentScope);

    // Jumps into either arm of an 'if constexpr' are not allowed.
    NewParentScope = Scopes.size();
    Scopes.push_back(GotoScope(ParentScope, Diag, 0, IS->getBeginLoc()));
    BuildScopeInformation(IS->getThen(), NewParentScope);
    if (Stmt *Else = IS->getElse()) {
      NewParentScope = Scopes.size();
      Scopes.push_back(GotoScope(ParentScope, Diag, 0, IS->getBeginLoc()));
      BuildScopeInformation(Else, NewParentScope);
    }
    return;
  }

  case Stmt::CXXTryStmtClass: {
    CXXTryStmt *TS = cast<CXXTryStmt>(S);
    {
      unsigned NewParentScope = Scopes.size();
      Scopes.push_back(GotoScope(ParentScope,
                                 diag::note_protected_by_cxx_try,
                                 diag::note_exits_cxx_try,
                                 TS->getSourceRange().getBegin()));
      if (Stmt *TryBlock = TS->getTryBlock())
        BuildScopeInformation(TryBlock, NewParentScope);
    }

    // Jumping from a catch handler into the try block is not allowed either.
    for (unsigned I = 0, E = TS->getNumHandlers(); I != E; ++I) {
      CXXCatchStmt *CS = TS->getHandler(I);
      unsigned NewParentScope = Scopes.size();
      Scopes.push_back(GotoScope(ParentScope,
                                 diag::note_protected_by_cxx_catch,
                                 diag::note_exits_cxx_catch,
                                 CS->getSourceRange().getBegin()));
      BuildScopeInformation(CS->getHandlerBlock(), NewParentScope);
    }
    return;
  }

  case Stmt::SEHTryStmtClass: {
    SEHTryStmt *TS = cast<SEHTryStmt>(S);
    {
      unsigned NewParentScope = Scopes.size();
      Scopes.push_back(GotoScope(ParentScope,
                                 diag::note_protected_by_seh_try,
                                 diag::note_exits_seh_try,
                                 TS->getSourceRange().getBegin()));
      if (Stmt *TryBlock = TS->getTryBlock())
        BuildScopeInformation(TryBlock, NewParentScope);
    }

    // Jumps from __except or __finally into the __try are not allowed either.
    if (SEHExceptStmt *Except = TS->getExceptHandler()) {
      unsigned NewParentScope = Scopes.size();
      Scopes.push_back(GotoScope(ParentScope,
                                 diag::note_protected_by_seh_except,
                                 diag::note_exits_seh_except,
                                 Except->getSourceRange().getBegin()));
      BuildScopeInformation(Except->getBlock(), NewParentScope);
    } else if (SEHFinallyStmt *Finally = TS->getFinallyHandler()) {
      unsigned NewParentScope = Scopes.size();
      Scopes.push_back(GotoScope(ParentScope,
                                 diag::note_protected_by_seh_finally,
                                 diag::note_exits_seh_finally,
                                 Finally->getSourceRange().getBegin()));
      BuildScopeInformation(Finally->getBlock(), NewParentScope);
    }

    return;
  }

  case Stmt::DeclStmtClass: {
    // If this is a declstmt with a VLA definition, it defines a scope from here
    // to the end of the containing context.
    DeclStmt *DS = cast<DeclStmt>(S);
    // The decl statement creates a scope if any of the decls in it are VLAs
    // or have the cleanup attribute.
    for (auto *I : DS->decls())
      BuildScopeInformation(I, origParentScope);
    return;
  }

  case Stmt::StmtExprClass: {
    // [GNU]
    // Jumping into a statement expression with goto, or using a switch
    // statement outside the statement expression with a case or default
    // label inside the statement expression, is not permitted.
    // Jumping out of a statement expression is permitted.
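    // For example (a sketch of the rejected pattern):
    //   goto lbl;
    //   ({ lbl: ; });  // the jump would enter the statement expression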
    StmtExpr *SE = cast<StmtExpr>(S);
    unsigned NewParentScope = Scopes.size();
    Scopes.push_back(GotoScope(ParentScope,
                               diag::note_enters_statement_expression,
                               /*OutDiag=*/0, SE->getBeginLoc()));
    BuildScopeInformation(SE->getSubStmt(), NewParentScope);
    return;
  }

  case Stmt::ObjCAtTryStmtClass: {
    // Disallow jumps into any part of an @try statement by pushing a scope and
    // walking all sub-stmts in that scope.
    ObjCAtTryStmt *AT = cast<ObjCAtTryStmt>(S);
    // Recursively walk the AST for the @try part.
    {
      unsigned NewParentScope = Scopes.size();
      Scopes.push_back(GotoScope(ParentScope,
                                 diag::note_protected_by_objc_try,
                                 diag::note_exits_objc_try,
                                 AT->getAtTryLoc()));
      if (Stmt *TryPart = AT->getTryBody())
        BuildScopeInformation(TryPart, NewParentScope);
    }

    // Jump from the catch to the finally or try is not valid.
    for (ObjCAtCatchStmt *AC : AT->catch_stmts()) {
      unsigned NewParentScope = Scopes.size();
      Scopes.push_back(GotoScope(ParentScope,
                                 diag::note_protected_by_objc_catch,
                                 diag::note_exits_objc_catch,
                                 AC->getAtCatchLoc()));
      // Each @catch handler gets its own scope, so jumps between handlers
      // are not valid either.
      BuildScopeInformation(AC->getCatchBody(), NewParentScope);
    }

    // Jump from the finally to the try or catch is not valid.
    if (ObjCAtFinallyStmt *AF = AT->getFinallyStmt()) {
      unsigned NewParentScope = Scopes.size();
      Scopes.push_back(GotoScope(ParentScope,
                                 diag::note_protected_by_objc_finally,
                                 diag::note_exits_objc_finally,
                                 AF->getAtFinallyLoc()));
      BuildScopeInformation(AF, NewParentScope);
    }

    return;
  }

  case Stmt::ObjCAtSynchronizedStmtClass: {
    // Disallow jumps into the protected statement of an @synchronized, but
    // allow jumps into the object expression it protects.
    ObjCAtSynchronizedStmt *AS = cast<ObjCAtSynchronizedStmt>(S);
    // Recursively walk the AST for the @synchronized object expr; it is
    // evaluated in the normal scope.
    BuildScopeInformation(AS->getSynchExpr(), ParentScope);

    // Recursively walk the AST for the @synchronized part, protected by a new
    // scope.
    unsigned NewParentScope = Scopes.size();
    Scopes.push_back(GotoScope(ParentScope,
                               diag::note_protected_by_objc_synchronized,
                               diag::note_exits_objc_synchronized,
                               AS->getAtSynchronizedLoc()));
    BuildScopeInformation(AS->getSynchBody(), NewParentScope);
    return;
  }

  case Stmt::ObjCAutoreleasePoolStmtClass: {
    // Disallow jumps into the protected statement of an @autoreleasepool.
    ObjCAutoreleasePoolStmt *AS = cast<ObjCAutoreleasePoolStmt>(S);
    // Recursively walk the AST for the @autoreleasepool part, protected by a
    // new scope.
    unsigned NewParentScope = Scopes.size();
    Scopes.push_back(GotoScope(ParentScope,
                               diag::note_protected_by_objc_autoreleasepool,
                               diag::note_exits_objc_autoreleasepool,
                               AS->getAtLoc()));
    BuildScopeInformation(AS->getSubStmt(), NewParentScope);
    return;
  }

  case Stmt::ExprWithCleanupsClass: {
    // Disallow jumps past full-expressions that use blocks with
    // non-trivial cleanups of their captures. This is theoretically
    // implementable but a lot of work which we haven't felt up to doing.
    ExprWithCleanups *EWC = cast<ExprWithCleanups>(S);
    for (unsigned i = 0, e = EWC->getNumObjects(); i != e; ++i) {
      if (auto *BDecl = EWC->getObject(i).dyn_cast<BlockDecl *>())
        for (const auto &CI : BDecl->captures()) {
          VarDecl *variable = CI.getVariable();
          BuildScopeInformation(variable, BDecl, origParentScope);
        }
      else if (auto *CLE = EWC->getObject(i).dyn_cast<CompoundLiteralExpr *>())
        BuildScopeInformation(CLE, origParentScope);
      else
        llvm_unreachable("unexpected cleanup object type");
    }
    break;
  }

  case Stmt::MaterializeTemporaryExprClass: {
    // Disallow jumps out of scopes containing temporaries lifetime-extended to
    // automatic storage duration.
    MaterializeTemporaryExpr *MTE = cast<MaterializeTemporaryExpr>(S);
    if (MTE->getStorageDuration() == SD_Automatic) {
      const Expr *ExtendedObject =
          MTE->getSubExpr()->skipRValueSubobjectAdjustments();
      if (ExtendedObject->getType().isDestructedType()) {
        Scopes.push_back(GotoScope(ParentScope, 0,
                                   diag::note_exits_temporary_dtor,
                                   ExtendedObject->getExprLoc()));
        origParentScope = Scopes.size()-1;
      }
    }
    break;
  }

  case Stmt::CaseStmtClass:
  case Stmt::DefaultStmtClass:
  case Stmt::LabelStmtClass:
    LabelAndGotoScopes[S] = ParentScope;
    break;

  case Stmt::AttributedStmtClass: {
    AttributedStmt *AS = cast<AttributedStmt>(S);
    if (GetMustTailAttr(AS)) {
      LabelAndGotoScopes[AS] = ParentScope;
      MustTailStmts.push_back(AS);
    }
    break;
  }

  case Stmt::OpenACCComputeConstructClass: {
    unsigned NewParentScope = Scopes.size();
    OpenACCComputeConstruct *CC = cast<OpenACCComputeConstruct>(S);
    Scopes.push_back(GotoScope(
        ParentScope, diag::note_acc_branch_into_compute_construct,
        diag::note_acc_branch_out_of_compute_construct, CC->getBeginLoc()));
    BuildScopeInformation(CC->getStructuredBlock(), NewParentScope);
    return;
  }

  default:
    if (auto *ED = dyn_cast<OMPExecutableDirective>(S)) {
      if (!ED->isStandaloneDirective()) {
        unsigned NewParentScope = Scopes.size();
        Scopes.emplace_back(ParentScope,
                            diag::note_omp_protected_structured_block,
                            diag::note_omp_exits_structured_block,
                            ED->getStructuredBlock()->getBeginLoc());
        BuildScopeInformation(ED->getStructuredBlock(), NewParentScope);
        return;
      }
    }
    break;
  }

  for (Stmt *SubStmt : S->children()) {
    if (!SubStmt)
      continue;
    if (StmtsToSkip) {
      --StmtsToSkip;
      continue;
    }

    // Cases, labels, and defaults aren't "scope parents". It's also
    // important to handle these iteratively instead of recursively in
    // order to avoid blowing out the stack.
    while (true) {
      Stmt *Next;
      if (SwitchCase *SC = dyn_cast<SwitchCase>(SubStmt))
        Next = SC->getSubStmt();
      else if (LabelStmt *LS = dyn_cast<LabelStmt>(SubStmt))
        Next = LS->getSubStmt();
      else
        break;

      LabelAndGotoScopes[SubStmt] = ParentScope;
      SubStmt = Next;
    }

    // Recursively walk the AST.
    BuildScopeInformation(SubStmt, ParentScope);
  }
}

/// VerifyJumps - Verify each element of the Jumps array to see if they are
/// valid, emitting diagnostics if not.
void JumpScopeChecker::VerifyJumps() {
  while (!Jumps.empty()) {
    Stmt *Jump = Jumps.pop_back_val();

    // With a direct goto, check the jump against its label's scope.
    if (GotoStmt *GS = dyn_cast<GotoStmt>(Jump)) {
      // The label may not have a statement if it's coming from inline MS ASM.
      if (GS->getLabel()->getStmt()) {
        CheckJump(GS, GS->getLabel()->getStmt(), GS->getGotoLoc(),
                  diag::err_goto_into_protected_scope,
                  diag::ext_goto_into_protected_scope,
                  diag::warn_cxx98_compat_goto_into_protected_scope);
      }
      CheckGotoStmt(GS);
      continue;
    }

    // If an asm goto jumps to a different scope, things like destructors or
    // initializers might not be run which may be surprising to users. Perhaps
    // this behavior can be changed in the future, but today Clang will not
    // generate such code. Produce a diagnostic instead. See also the
    // discussion here: https://gcc.gnu.org/bugzilla/show_bug.cgi?id=110728.
    if (auto *G = dyn_cast<GCCAsmStmt>(Jump)) {
      for (AddrLabelExpr *L : G->labels()) {
        LabelDecl *LD = L->getLabel();
        unsigned JumpScope = LabelAndGotoScopes[G];
        unsigned TargetScope = LabelAndGotoScopes[LD->getStmt()];
        if (JumpScope != TargetScope)
          DiagnoseIndirectOrAsmJump(G, JumpScope, LD, TargetScope);
      }
      continue;
    }

    // We only get indirect gotos here when they have a constant target.
    if (IndirectGotoStmt *IGS = dyn_cast<IndirectGotoStmt>(Jump)) {
      LabelDecl *Target = IGS->getConstantTarget();
      CheckJump(IGS, Target->getStmt(), IGS->getGotoLoc(),
                diag::err_goto_into_protected_scope,
                diag::ext_goto_into_protected_scope,
                diag::warn_cxx98_compat_goto_into_protected_scope);
      continue;
    }

    SwitchStmt *SS = cast<SwitchStmt>(Jump);
    for (SwitchCase *SC = SS->getSwitchCaseList(); SC;
         SC = SC->getNextSwitchCase()) {
      if (CHECK_PERMISSIVE(!LabelAndGotoScopes.count(SC)))
        continue;
      SourceLocation Loc;
      if (CaseStmt *CS = dyn_cast<CaseStmt>(SC))
        Loc = CS->getBeginLoc();
      else if (DefaultStmt *DS = dyn_cast<DefaultStmt>(SC))
        Loc = DS->getBeginLoc();
      else
        Loc = SC->getBeginLoc();
      CheckJump(SS, SC, Loc, diag::err_switch_into_protected_scope, 0,
                diag::warn_cxx98_compat_switch_into_protected_scope);
    }
  }
}

/// VerifyIndirectJumps - Verify whether any possible indirect goto jump might
/// cross a protection boundary. Unlike direct jumps, indirect goto jumps
/// count cleanups as protection boundaries: since there's no way to know where
/// the jump is going, we can't implicitly run the right cleanups the way we
/// can with direct jumps. Thus, an indirect/asm jump is "trivial" if it
/// bypasses no initializations and no teardowns. More formally, an
/// indirect/asm jump from A to B is trivial if the path out from A to DCA(A,B)
/// is trivial and the path in from DCA(A,B) to B is trivial, where DCA(A,B) is
/// the deepest common ancestor of A and B. Jump-triviality is transitive but
/// asymmetric.
///
/// A path in is trivial if none of the entered scopes have an InDiag.
/// A path out is trivial if none of the exited scopes have an OutDiag.
///
/// Under these definitions, this function checks that the indirect
/// jump between A and B is trivial for every indirect goto statement A
/// and every label B whose address was taken in the function.
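///
/// For example (a sketch; names are illustrative), this indirect goto is
/// non-trivial, since reaching the target would bypass the VLA's init:
///   void *p = &&in;
///   goto *p;       // jump site is outside the VLA's scope
///   {
///     int vla[n];
///    in: ;         // target sits inside a scope with an InDiag
///   }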
void JumpScopeChecker::VerifyIndirectJumps() {
  if (IndirectJumps.empty())
    return;
  // If there aren't any address-of-label expressions in this function,
  // complain about the first indirect goto.
  if (IndirectJumpTargets.empty()) {
    S.Diag(IndirectJumps[0]->getBeginLoc(),
           diag::err_indirect_goto_without_addrlabel);
    return;
  }
  // Collect a single representative of every scope containing an indirect
  // goto. For most code bases, this substantially cuts down on the number of
  // jump sites we'll have to consider later.
  using JumpScope = std::pair<unsigned, Stmt *>;
  SmallVector<JumpScope, 32> JumpScopes;
  {
    llvm::DenseMap<unsigned, Stmt*> JumpScopesMap;
    for (Stmt *IG : IndirectJumps) {
      if (CHECK_PERMISSIVE(!LabelAndGotoScopes.count(IG)))
        continue;
      unsigned IGScope = LabelAndGotoScopes[IG];
      if (!JumpScopesMap.contains(IGScope))
        JumpScopesMap[IGScope] = IG;
    }
    JumpScopes.reserve(JumpScopesMap.size());
    for (auto &Pair : JumpScopesMap)
      JumpScopes.emplace_back(Pair);
  }

  // Collect a single representative of every scope containing a
  // label whose address was taken somewhere in the function.
  // For most code bases, there will be only one such scope.
  llvm::DenseMap<unsigned, LabelDecl*> TargetScopes;
  for (LabelDecl *TheLabel : IndirectJumpTargets) {
    if (CHECK_PERMISSIVE(!LabelAndGotoScopes.count(TheLabel->getStmt())))
      continue;
    unsigned LabelScope = LabelAndGotoScopes[TheLabel->getStmt()];
    if (!TargetScopes.contains(LabelScope))
      TargetScopes[LabelScope] = TheLabel;
  }

  // For each target scope, make sure it's trivially reachable from
  // every scope containing a jump site.
  //
  // A path between scopes always consists of exiting zero or more
  // scopes, then entering zero or more scopes. We build a set of
  // scopes S from which the target scope can be trivially
  // entered, then verify that every jump scope can be trivially
  // exited to reach a scope in S.
  llvm::BitVector Reachable(Scopes.size(), false);
  for (auto [TargetScope, TargetLabel] : TargetScopes) {
    Reachable.reset();

    // Mark all the enclosing scopes from which you can safely jump
    // into the target scope. 'Min' will end up being the index of
    // the shallowest such scope.
    unsigned Min = TargetScope;
    while (true) {
      Reachable.set(Min);

      // Don't go beyond the outermost scope.
      if (Min == 0) break;

      // Stop if we can't trivially enter the current scope.
      if (Scopes[Min].InDiag) break;

      Min = Scopes[Min].ParentScope;
    }

    // Walk through all the jump sites, checking that they can trivially
    // reach this label scope.
    for (auto [JumpScope, JumpStmt] : JumpScopes) {
      unsigned Scope = JumpScope;
      // Walk out the "scope chain" for this scope, looking for a scope
      // we've marked reachable. For well-formed code this amortizes
      // to O(JumpScopes.size() / Scopes.size()): we only iterate
      // when we see something unmarked, and in well-formed code we
      // mark everything we iterate past.
      bool IsReachable = false;
      while (true) {
        if (Reachable.test(Scope)) {
          // If we find something reachable, mark all the scopes we just
          // walked through as reachable.
          for (unsigned S = JumpScope; S != Scope; S = Scopes[S].ParentScope)
            Reachable.set(S);
          IsReachable = true;
          break;
        }

        // Don't walk out if we've reached the top-level scope or we've
        // gotten shallower than the shallowest reachable scope.
        if (Scope == 0 || Scope < Min) break;

        // Don't walk out through an out-diagnostic.
        if (Scopes[Scope].OutDiag) break;

        Scope = Scopes[Scope].ParentScope;
      }

      // Only diagnose if we didn't find something.
      if (IsReachable) continue;

      DiagnoseIndirectOrAsmJump(JumpStmt, JumpScope, TargetLabel, TargetScope);
    }
  }
}

/// Return true if a particular error+note combination must be downgraded to a
/// warning in Microsoft mode.
static bool IsMicrosoftJumpWarning(unsigned JumpDiag, unsigned InDiagNote) {
  return (JumpDiag == diag::err_goto_into_protected_scope &&
          (InDiagNote == diag::note_protected_by_variable_init ||
           InDiagNote == diag::note_protected_by_variable_nontriv_destructor));
}

/// Return true if a particular note should be downgraded to a compatibility
/// warning in C++11 mode.
static bool IsCXX98CompatWarning(Sema &S, unsigned InDiagNote) {
  return S.getLangOpts().CPlusPlus11 &&
         InDiagNote == diag::note_protected_by_variable_non_pod;
}

/// Produce primary diagnostic for an indirect jump statement.
static void DiagnoseIndirectOrAsmJumpStmt(Sema &S, Stmt *Jump,
                                          LabelDecl *Target, bool &Diagnosed) {
  if (Diagnosed)
    return;
  bool IsAsmGoto = isa<GCCAsmStmt>(Jump);
  S.Diag(Jump->getBeginLoc(), diag::err_indirect_goto_in_protected_scope)
      << IsAsmGoto;
  S.Diag(Target->getStmt()->getIdentLoc(), diag::note_indirect_goto_target)
      << IsAsmGoto;
  Diagnosed = true;
}

/// Produce note diagnostics for a jump into a protected scope.
void JumpScopeChecker::NoteJumpIntoScopes(ArrayRef<unsigned> ToScopes) {
  if (CHECK_PERMISSIVE(ToScopes.empty()))
    return;
  for (unsigned I = 0, E = ToScopes.size(); I != E; ++I)
    if (Scopes[ToScopes[I]].InDiag)
      S.Diag(Scopes[ToScopes[I]].Loc, Scopes[ToScopes[I]].InDiag);
}

/// Diagnose an indirect jump which is known to cross scopes.
void JumpScopeChecker::DiagnoseIndirectOrAsmJump(Stmt *Jump, unsigned JumpScope,
                                                 LabelDecl *Target,
                                                 unsigned TargetScope) {
  if (CHECK_PERMISSIVE(JumpScope == TargetScope))
    return;

  unsigned Common = GetDeepestCommonScope(JumpScope, TargetScope);
  bool Diagnosed = false;

  // Walk out the scope chain until we reach the common ancestor.
  for (unsigned I = JumpScope; I != Common; I = Scopes[I].ParentScope)
    if (Scopes[I].OutDiag) {
      DiagnoseIndirectOrAsmJumpStmt(S, Jump, Target, Diagnosed);
      S.Diag(Scopes[I].Loc, Scopes[I].OutDiag);
    }

  SmallVector<unsigned, 10> ToScopesCXX98Compat;

  // Now walk into the scopes containing the label whose address was taken.
  for (unsigned I = TargetScope; I != Common; I = Scopes[I].ParentScope)
    if (IsCXX98CompatWarning(S, Scopes[I].InDiag))
      ToScopesCXX98Compat.push_back(I);
    else if (Scopes[I].InDiag) {
      DiagnoseIndirectOrAsmJumpStmt(S, Jump, Target, Diagnosed);
      S.Diag(Scopes[I].Loc, Scopes[I].InDiag);
    }

  // Diagnose this jump if it would be ill-formed in C++98.
  if (!Diagnosed && !ToScopesCXX98Compat.empty()) {
    bool IsAsmGoto = isa<GCCAsmStmt>(Jump);
    S.Diag(Jump->getBeginLoc(),
           diag::warn_cxx98_compat_indirect_goto_in_protected_scope)
        << IsAsmGoto;
    S.Diag(Target->getStmt()->getIdentLoc(), diag::note_indirect_goto_target)
        << IsAsmGoto;
    NoteJumpIntoScopes(ToScopesCXX98Compat);
  }
}

/// CheckJump - Validate that the specified jump statement is valid: that it is
/// jumping within or out of its current scope, not into a deeper one.
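/// For example (a sketch), a switch case inside a VLA's scope is rejected,
/// since dispatching to it would jump past the VLA's initialization:
///   switch (n) {
///   case 0:
///     int vla[n];  // protected scope begins here
///   case 1:        // error: jump into the scope of 'vla'
///     break;
///   }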
void JumpScopeChecker::CheckJump(Stmt *From, Stmt *To, SourceLocation DiagLoc,
                                 unsigned JumpDiagError,
                                 unsigned JumpDiagWarning,
                                 unsigned JumpDiagCXX98Compat) {
  if (CHECK_PERMISSIVE(!LabelAndGotoScopes.count(From)))
    return;
  if (CHECK_PERMISSIVE(!LabelAndGotoScopes.count(To)))
    return;

  unsigned FromScope = LabelAndGotoScopes[From];
  unsigned ToScope = LabelAndGotoScopes[To];

  // Common case: exactly the same scope, which is fine.
  if (FromScope == ToScope) return;

  // Diagnose gotos out of __finally blocks, OpenMP structured blocks, and
  // OpenACC compute constructs.
  if (isa<GotoStmt>(From) || isa<IndirectGotoStmt>(From)) {
    // If FromScope > ToScope, FromScope is more nested and the jump goes to a
    // less nested scope. Check if it crosses a __finally along the way.
    for (unsigned I = FromScope; I > ToScope; I = Scopes[I].ParentScope) {
      if (Scopes[I].InDiag == diag::note_protected_by_seh_finally) {
        S.Diag(From->getBeginLoc(), diag::warn_jump_out_of_seh_finally);
        break;
      } else if (Scopes[I].InDiag ==
                 diag::note_omp_protected_structured_block) {
        S.Diag(From->getBeginLoc(), diag::err_goto_into_protected_scope);
        S.Diag(To->getBeginLoc(), diag::note_omp_exits_structured_block);
        break;
      } else if (Scopes[I].InDiag ==
                 diag::note_acc_branch_into_compute_construct) {
        S.Diag(From->getBeginLoc(), diag::err_goto_into_protected_scope);
        S.Diag(Scopes[I].Loc, diag::note_acc_branch_out_of_compute_construct);
        return;
      }
    }
  }

  unsigned CommonScope = GetDeepestCommonScope(FromScope, ToScope);

  // It's okay to jump out from a nested scope.
  if (CommonScope == ToScope) return;

  // Pull out (and reverse) any scopes we might need to diagnose skipping.
  SmallVector<unsigned, 10> ToScopesCXX98Compat;
  SmallVector<unsigned, 10> ToScopesError;
  SmallVector<unsigned, 10> ToScopesWarning;
  for (unsigned I = ToScope; I != CommonScope; I = Scopes[I].ParentScope) {
    if (S.getLangOpts().MSVCCompat && JumpDiagWarning != 0 &&
        IsMicrosoftJumpWarning(JumpDiagError, Scopes[I].InDiag))
      ToScopesWarning.push_back(I);
    else if (IsCXX98CompatWarning(S, Scopes[I].InDiag))
      ToScopesCXX98Compat.push_back(I);
    else if (Scopes[I].InDiag)
      ToScopesError.push_back(I);
  }

  // Handle warnings.
  if (!ToScopesWarning.empty()) {
    S.Diag(DiagLoc, JumpDiagWarning);
    NoteJumpIntoScopes(ToScopesWarning);
    assert(isa<LabelStmt>(To));
    LabelStmt *Label = cast<LabelStmt>(To);
    Label->setSideEntry(true);
  }

  // Handle errors.
  if (!ToScopesError.empty()) {
    S.Diag(DiagLoc, JumpDiagError);
    NoteJumpIntoScopes(ToScopesError);
  }

  // Handle -Wc++98-compat warnings if the jump is well-formed.
  if (ToScopesError.empty() && !ToScopesCXX98Compat.empty()) {
    S.Diag(DiagLoc, JumpDiagCXX98Compat);
    NoteJumpIntoScopes(ToScopesCXX98Compat);
  }
}

void JumpScopeChecker::CheckGotoStmt(GotoStmt *GS) {
  if (GS->getLabel()->isMSAsmLabel()) {
    S.Diag(GS->getGotoLoc(), diag::err_goto_ms_asm_label)
        << GS->getLabel()->getIdentifier();
    S.Diag(GS->getLabel()->getLocation(), diag::note_goto_ms_asm_label)
        << GS->getLabel()->getIdentifier();
  }
}

void JumpScopeChecker::VerifyMustTailStmts() {
  for (AttributedStmt *AS : MustTailStmts) {
    for (unsigned I = LabelAndGotoScopes[AS]; I; I = Scopes[I].ParentScope) {
      if (Scopes[I].OutDiag) {
        S.Diag(AS->getBeginLoc(), diag::err_musttail_scope);
        S.Diag(Scopes[I].Loc, Scopes[I].OutDiag);
      }
    }
  }
}
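
// For example (a sketch; 'f' and 'g' are illustrative), a musttail return
// inside a scope that requires cleanups on exit is rejected, since a tail
// call replaces the frame before any cleanup could run:
//   int g(int);
//   int f(int n) {
//     std::string s;                   // scope gains an OutDiag (destructor)
//     [[clang::musttail]] return g(n); // error: cannot exit s's scope
//   }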

const Attr *JumpScopeChecker::GetMustTailAttr(AttributedStmt *AS) {
  ArrayRef<const Attr *> Attrs = AS->getAttrs();
  const auto *Iter =
      llvm::find_if(Attrs, [](const Attr *A) { return isa<MustTailAttr>(A); });
  return Iter != Attrs.end() ? *Iter : nullptr;
}

void Sema::DiagnoseInvalidJumps(Stmt *Body) {
  (void)JumpScopeChecker(Body, *this);
}