//=== StackAddrEscapeChecker.cpp ----------------------------------*- C++ -*--//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This file defines the stack address leak checker, which checks if an invalid
// stack address is stored into a global or heap location. See CERT DCL30-C.
//
//===----------------------------------------------------------------------===//
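
// A simplified sketch of the escapes this checker diagnoses (illustrative
// only, not taken from the checker's test suite):
//
//   int *Global;
//   int *f() {
//     int Local = 0;
//     Global = &Local; // stack address stored into a global variable
//     return &Local;   // stack address returned to the caller
//   }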

#include "clang/AST/ExprCXX.h"
#include "clang/Basic/SourceManager.h"
#include "clang/StaticAnalyzer/Checkers/BuiltinCheckerRegistration.h"
#include "clang/StaticAnalyzer/Core/BugReporter/BugType.h"
#include "clang/StaticAnalyzer/Core/Checker.h"
#include "clang/StaticAnalyzer/Core/CheckerManager.h"
#include "clang/StaticAnalyzer/Core/PathSensitive/CallEvent.h"
#include "clang/StaticAnalyzer/Core/PathSensitive/CheckerContext.h"
#include "clang/StaticAnalyzer/Core/PathSensitive/MemRegion.h"
#include "clang/StaticAnalyzer/Core/PathSensitive/ProgramState.h"
#include "llvm/ADT/STLExtras.h"
#include "llvm/ADT/SmallString.h"
#include "llvm/Support/raw_ostream.h"
using namespace clang;
using namespace ento;

namespace {
class StackAddrEscapeChecker
    : public Checker<check::PreCall, check::PreStmt<ReturnStmt>,
                     check::EndFunction> {
  mutable IdentifierInfo *dispatch_semaphore_tII = nullptr;
  mutable std::unique_ptr<BugType> BT_stackleak;
  mutable std::unique_ptr<BugType> BT_returnstack;
  mutable std::unique_ptr<BugType> BT_capturedstackasync;
  mutable std::unique_ptr<BugType> BT_capturedstackret;

public:
  enum CheckKind {
    CK_StackAddrEscapeChecker,
    CK_StackAddrAsyncEscapeChecker,
    CK_NumCheckKinds
  };

  bool ChecksEnabled[CK_NumCheckKinds] = {false};
  CheckerNameRef CheckNames[CK_NumCheckKinds];

  void checkPreCall(const CallEvent &Call, CheckerContext &C) const;
  void checkPreStmt(const ReturnStmt *RS, CheckerContext &C) const;
  void checkEndFunction(const ReturnStmt *RS, CheckerContext &Ctx) const;

private:
  void checkAsyncExecutedBlockCaptures(const BlockDataRegion &B,
                                       CheckerContext &C) const;
  void EmitReturnLeakError(CheckerContext &C, const MemRegion *LeakedRegion,
                           const Expr *RetE) const;
  bool isSemaphoreCaptured(const BlockDecl &B) const;
  static SourceRange genName(raw_ostream &os, const MemRegion *R,
                             ASTContext &Ctx);
  static SmallVector<std::pair<const MemRegion *, const StackSpaceRegion *>, 4>
  getCapturedStackRegions(const BlockDataRegion &B, CheckerContext &C);
  static bool isNotInCurrentFrame(const StackSpaceRegion *MS,
                                  CheckerContext &C);
};
} // namespace

SourceRange StackAddrEscapeChecker::genName(raw_ostream &os, const MemRegion *R,
                                            ASTContext &Ctx) {
  // Get the base region, stripping away fields and elements.
  R = R->getBaseRegion();
  SourceManager &SM = Ctx.getSourceManager();
  SourceRange range;
  os << "Address of ";

  // Check if the region is a compound literal.
  if (const auto *CR = dyn_cast<CompoundLiteralRegion>(R)) {
    const CompoundLiteralExpr *CL = CR->getLiteralExpr();
    os << "stack memory associated with a compound literal "
          "declared on line "
       << SM.getExpansionLineNumber(CL->getBeginLoc());
    range = CL->getSourceRange();
  } else if (const auto *AR = dyn_cast<AllocaRegion>(R)) {
    const Expr *ARE = AR->getExpr();
    SourceLocation L = ARE->getBeginLoc();
    range = ARE->getSourceRange();
    os << "stack memory allocated by call to alloca() on line "
       << SM.getExpansionLineNumber(L);
  } else if (const auto *BR = dyn_cast<BlockDataRegion>(R)) {
    const BlockDecl *BD = BR->getCodeRegion()->getDecl();
    SourceLocation L = BD->getBeginLoc();
    range = BD->getSourceRange();
    os << "stack-allocated block declared on line "
       << SM.getExpansionLineNumber(L);
  } else if (const auto *VR = dyn_cast<VarRegion>(R)) {
    os << "stack memory associated with local variable '" << VR->getString()
       << '\'';
    range = VR->getDecl()->getSourceRange();
  } else if (const auto *LER = dyn_cast<CXXLifetimeExtendedObjectRegion>(R)) {
    QualType Ty = LER->getValueType().getLocalUnqualifiedType();
    os << "stack memory associated with temporary object of type '";
    Ty.print(os, Ctx.getPrintingPolicy());
    os << "' lifetime extended by local variable";
    if (const IdentifierInfo *ID = LER->getExtendingDecl()->getIdentifier())
      os << " '" << ID->getName() << '\'';
    range = LER->getExpr()->getSourceRange();
  } else if (const auto *TOR = dyn_cast<CXXTempObjectRegion>(R)) {
    QualType Ty = TOR->getValueType().getLocalUnqualifiedType();
    os << "stack memory associated with temporary object of type '";
    Ty.print(os, Ctx.getPrintingPolicy());
    os << "'";
    range = TOR->getExpr()->getSourceRange();
  } else {
    llvm_unreachable("Invalid region in ReturnStackAddressChecker.");
  }

  return range;
}

bool StackAddrEscapeChecker::isNotInCurrentFrame(const StackSpaceRegion *MS,
                                                 CheckerContext &C) {
  return MS->getStackFrame() != C.getStackFrame();
}

bool StackAddrEscapeChecker::isSemaphoreCaptured(const BlockDecl &B) const {
  if (!dispatch_semaphore_tII)
    dispatch_semaphore_tII =
        &B.getASTContext().Idents.get("dispatch_semaphore_t");
  for (const auto &C : B.captures()) {
    const auto *T = C.getVariable()->getType()->getAs<TypedefType>();
    if (T && T->getDecl()->getIdentifier() == dispatch_semaphore_tII)
      return true;
  }
  return false;
}

SmallVector<std::pair<const MemRegion *, const StackSpaceRegion *>, 4>
StackAddrEscapeChecker::getCapturedStackRegions(const BlockDataRegion &B,
                                                CheckerContext &C) {
  SmallVector<std::pair<const MemRegion *, const StackSpaceRegion *>, 4>
      Regions;
  ProgramStateRef State = C.getState();
  for (auto Var : B.referenced_vars()) {
    SVal Val = State->getSVal(Var.getCapturedRegion());
    if (const MemRegion *Region = Val.getAsRegion()) {
      if (const auto *Space =
              Region->getMemorySpaceAs<StackSpaceRegion>(State)) {
        Regions.emplace_back(Region, Space);
      }
    }
  }
  return Regions;
}

static void EmitReturnedAsPartOfError(llvm::raw_ostream &OS, SVal ReturnedVal,
                                      const MemRegion *LeakedRegion) {
  if (const MemRegion *ReturnedRegion = ReturnedVal.getAsRegion()) {
    if (isa<BlockDataRegion>(ReturnedRegion)) {
      OS << " is captured by a returned block";
      return;
    }
  }

  // Generic message
  OS << " returned to caller";
}

void StackAddrEscapeChecker::EmitReturnLeakError(CheckerContext &C,
                                                 const MemRegion *R,
                                                 const Expr *RetE) const {
  ExplodedNode *N = C.generateNonFatalErrorNode();
  if (!N)
    return;
  if (!BT_returnstack)
    BT_returnstack = std::make_unique<BugType>(
        CheckNames[CK_StackAddrEscapeChecker],
        "Return of address to stack-allocated memory");

  // Generate a report for this bug.
  SmallString<128> buf;
  llvm::raw_svector_ostream os(buf);

  // Error message formatting
  SourceRange range = genName(os, R, C.getASTContext());
  EmitReturnedAsPartOfError(os, C.getSVal(RetE), R);

  auto report =
      std::make_unique<PathSensitiveBugReport>(*BT_returnstack, os.str(), N);
  report->addRange(RetE->getSourceRange());
  if (range.isValid())
    report->addRange(range);
  C.emitReport(std::move(report));
}

void StackAddrEscapeChecker::checkAsyncExecutedBlockCaptures(
    const BlockDataRegion &B, CheckerContext &C) const {
  // There is a not-too-uncommon idiom where a block passed to dispatch_async
  // captures a semaphore, and the thread that called dispatch_async then
  // blocks on dispatch_semaphore_wait until the block has finished executing.
  // To avoid false positives (for now), we ignore all blocks that have
  // captured a variable of type "dispatch_semaphore_t".
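  //
  // A simplified sketch of that idiom (illustrative only; 'Queue' and 'use'
  // are placeholders):
  //
  //   int Local = 0;
  //   int *Ptr = &Local;
  //   dispatch_semaphore_t Sema = dispatch_semaphore_create(0);
  //   dispatch_async(Queue, ^{
  //     use(Ptr);                       // the block captures a stack address
  //     dispatch_semaphore_signal(Sema);
  //   });
  //   dispatch_semaphore_wait(Sema, DISPATCH_TIME_FOREVER); // caller waits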
  if (isSemaphoreCaptured(*B.getDecl()))
    return;
  auto Regions = getCapturedStackRegions(B, C);
  for (const MemRegion *Region : llvm::make_first_range(Regions)) {
    // The block passed to dispatch_async may capture another block
    // created on the stack. However, there is no leak in this situation:
    // whether or not ARC is enabled, dispatch_async copies the passed
    // "outer" block (via Block_copy), and if that block has captured
    // another "inner" block, the "inner" block is copied as well.
    if (isa<BlockDataRegion>(Region))
      continue;
    ExplodedNode *N = C.generateNonFatalErrorNode();
    if (!N)
      continue;
    if (!BT_capturedstackasync)
      BT_capturedstackasync = std::make_unique<BugType>(
          CheckNames[CK_StackAddrAsyncEscapeChecker],
          "Address of stack-allocated memory is captured");
    SmallString<128> Buf;
    llvm::raw_svector_ostream Out(Buf);
    SourceRange Range = genName(Out, Region, C.getASTContext());
    Out << " is captured by an asynchronously-executed block";
    auto Report = std::make_unique<PathSensitiveBugReport>(
        *BT_capturedstackasync, Out.str(), N);
    if (Range.isValid())
      Report->addRange(Range);
    C.emitReport(std::move(Report));
  }
}

void StackAddrEscapeChecker::checkPreCall(const CallEvent &Call,
                                          CheckerContext &C) const {
  if (!ChecksEnabled[CK_StackAddrAsyncEscapeChecker])
    return;
  if (!Call.isGlobalCFunction("dispatch_after") &&
      !Call.isGlobalCFunction("dispatch_async"))
    return;
  for (unsigned Idx = 0, NumArgs = Call.getNumArgs(); Idx < NumArgs; ++Idx) {
    if (const BlockDataRegion *B = dyn_cast_or_null<BlockDataRegion>(
            Call.getArgSVal(Idx).getAsRegion()))
      checkAsyncExecutedBlockCaptures(*B, C);
  }
}

/// A visitor made for use with a ScanReachableSymbols scanner, used to find
/// stack regions within an SVal that live on the current stack frame of the
/// given checker context. This visitor excludes the NonParamVarRegions that
/// data is bound to in a BlockDataRegion's bindings, since these are likely
/// uninteresting, e.g., when a temporary is constructed on the stack but
/// captures values that would leak.
class FindStackRegionsSymbolVisitor final : public SymbolVisitor {
  CheckerContext &Ctxt;
  const StackFrameContext *PoppedStackFrame;
  SmallVectorImpl<const MemRegion *> &EscapingStackRegions;

public:
  explicit FindStackRegionsSymbolVisitor(
      CheckerContext &Ctxt,
      SmallVectorImpl<const MemRegion *> &StorageForStackRegions)
      : Ctxt(Ctxt), PoppedStackFrame(Ctxt.getStackFrame()),
        EscapingStackRegions(StorageForStackRegions) {}

  bool VisitSymbol(SymbolRef sym) override { return true; }

  bool VisitMemRegion(const MemRegion *MR) override {
    SaveIfEscapes(MR);

    if (const BlockDataRegion *BDR = MR->getAs<BlockDataRegion>())
      return VisitBlockDataRegionCaptures(BDR);

    return true;
  }

private:
  void SaveIfEscapes(const MemRegion *MR) {
    const auto *SSR = MR->getMemorySpaceAs<StackSpaceRegion>(Ctxt.getState());

    if (!SSR)
      return;

    const StackFrameContext *CapturedSFC = SSR->getStackFrame();
    if (CapturedSFC == PoppedStackFrame ||
        PoppedStackFrame->isParentOf(CapturedSFC))
      EscapingStackRegions.push_back(MR);
  }

  bool VisitBlockDataRegionCaptures(const BlockDataRegion *BDR) {
    for (auto Var : BDR->referenced_vars()) {
      SVal Val = Ctxt.getState()->getSVal(Var.getCapturedRegion());
      const MemRegion *Region = Val.getAsRegion();
      if (Region) {
        SaveIfEscapes(Region);
        VisitMemRegion(Region);
      }
    }

    return false;
  }
};

/// Given the memory regions flagged by FindStackRegionsSymbolVisitor, filter
/// out the returned regions that are likely not true leaks:
/// 1. a returned block data region that has stack memory space, and
/// 2. a returned constructed object that has stack memory space.
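///
/// A simplified sketch of the two filtered patterns (illustrative only):
///
///   typedef int (^IntBlock)(void);
///   IntBlock mkBlock() {
///     int X = 1;
///     return ^{ return X; }; // under ARC the block is implicitly copied
///                            // (CK_CopyAndAutoreleaseBlockObject)
///   }
///
///   struct S { int Buf[4]; };
///   S mkS() {
///     S Local{};
///     return Local;          // returned by value via a constructor call
///   }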
static SmallVector<const MemRegion *> FilterReturnExpressionLeaks(
    const SmallVectorImpl<const MemRegion *> &MaybeEscaped, CheckerContext &C,
    const Expr *RetE, SVal &RetVal) {

  SmallVector<const MemRegion *> WillEscape;

  const MemRegion *RetRegion = RetVal.getAsRegion();

  // Returning a record by value is fine. (In this case, the returned
  // expression will be a copy-constructor call, possibly wrapped in an
  // ExprWithCleanups node.)
  if (const ExprWithCleanups *Cleanup = dyn_cast<ExprWithCleanups>(RetE))
    RetE = Cleanup->getSubExpr();
  bool IsConstructExpr =
      isa<CXXConstructExpr>(RetE) && RetE->getType()->isRecordType();

  // The CK_CopyAndAutoreleaseBlockObject cast causes the block to be copied,
  // so the stack address does not escape here.
  bool IsCopyAndAutoreleaseBlockObj = false;
  if (const auto *ICE = dyn_cast<ImplicitCastExpr>(RetE)) {
    IsCopyAndAutoreleaseBlockObj =
        isa_and_nonnull<BlockDataRegion>(RetRegion) &&
        ICE->getCastKind() == CK_CopyAndAutoreleaseBlockObject;
  }

  for (const MemRegion *MR : MaybeEscaped) {
    if (RetRegion == MR && (IsCopyAndAutoreleaseBlockObj || IsConstructExpr))
      continue;

    WillEscape.push_back(MR);
  }

  return WillEscape;
}

/// For use in finding regions that live on the checker context's current
/// stack frame, deep in the SVal representing the return value.
static SmallVector<const MemRegion *>
FindEscapingStackRegions(CheckerContext &C, const Expr *RetE, SVal RetVal) {
  SmallVector<const MemRegion *> FoundStackRegions;

  FindStackRegionsSymbolVisitor Finder(C, FoundStackRegions);
  ScanReachableSymbols Scanner(C.getState(), Finder);
  Scanner.scan(RetVal);

  return FilterReturnExpressionLeaks(FoundStackRegions, C, RetE, RetVal);
}

void StackAddrEscapeChecker::checkPreStmt(const ReturnStmt *RS,
                                          CheckerContext &C) const {
  if (!ChecksEnabled[CK_StackAddrEscapeChecker])
    return;

  const Expr *RetE = RS->getRetValue();
  if (!RetE)
    return;
  RetE = RetE->IgnoreParens();

  SVal V = C.getSVal(RetE);

  SmallVector<const MemRegion *> EscapedStackRegions =
      FindEscapingStackRegions(C, RetE, V);

  for (const MemRegion *ER : EscapedStackRegions)
    EmitReturnLeakError(C, ER, RetE);
}

static const MemSpaceRegion *getStackOrGlobalSpaceRegion(ProgramStateRef State,
                                                         const MemRegion *R) {
  assert(R);
  if (const auto *MemSpace = R->getMemorySpace(State);
      isa<StackSpaceRegion, GlobalsSpaceRegion>(MemSpace))
    return MemSpace;

  // If R describes a lambda capture, it will be a symbolic region
  // referring to a field region of another symbolic region.
  if (const auto *SymReg = R->getBaseRegion()->getAs<SymbolicRegion>()) {
    if (const auto *OriginReg = SymReg->getSymbol()->getOriginRegion())
      return getStackOrGlobalSpaceRegion(State, OriginReg);
  }
  return nullptr;
}

static const MemRegion *getOriginBaseRegion(const MemRegion *Reg) {
  Reg = Reg->getBaseRegion();
  while (const auto *SymReg = dyn_cast<SymbolicRegion>(Reg)) {
    const auto *OriginReg = SymReg->getSymbol()->getOriginRegion();
    if (!OriginReg)
      break;
    Reg = OriginReg->getBaseRegion();
  }
  return Reg;
}

static std::optional<std::string> printReferrer(ProgramStateRef State,
                                                const MemRegion *Referrer) {
  assert(Referrer);
  const StringRef ReferrerMemorySpace = [](const MemSpaceRegion *Space) {
    if (isa<StaticGlobalSpaceRegion>(Space))
      return "static";
    if (isa<GlobalsSpaceRegion>(Space))
      return "global";
    assert(isa<StackSpaceRegion>(Space));
    // This case covers top-level and inlined analyses.
    return "caller";
  }(getStackOrGlobalSpaceRegion(State, Referrer));

  while (!Referrer->canPrintPretty()) {
    if (const auto *SymReg = dyn_cast<SymbolicRegion>(Referrer);
        SymReg && SymReg->getSymbol()->getOriginRegion()) {
      Referrer = SymReg->getSymbol()->getOriginRegion()->getBaseRegion();
    } else if (isa<CXXThisRegion>(Referrer)) {
      // Skip members of a class; those are handled in CheckExprLifetime.cpp
      // as warn_bind_ref_member_to_parameter or
      // warn_init_ptr_member_to_parameter_addr.
      return std::nullopt;
    } else if (isa<AllocaRegion>(Referrer)) {
      // Skip alloca() regions; they indicate advanced memory management and a
      // higher likelihood of CSA false positives.
      return std::nullopt;
    } else {
      assert(false && "Unexpected referrer region type.");
      return std::nullopt;
    }
  }
  assert(Referrer);
  assert(Referrer->canPrintPretty());

  std::string buf;
  llvm::raw_string_ostream os(buf);
  os << ReferrerMemorySpace << " variable ";
  Referrer->printPretty(os);
  return buf;
}

/// Check whether \p Region refers to a freshly minted symbol after an opaque
/// function call.
static bool isInvalidatedSymbolRegion(const MemRegion *Region) {
  const auto *SymReg = Region->getAs<SymbolicRegion>();
  if (!SymReg)
    return false;
  SymbolRef Symbol = SymReg->getSymbol();

  const auto *DerS = dyn_cast<SymbolDerived>(Symbol);
  return DerS && isa_and_nonnull<SymbolConjured>(DerS->getParentSymbol());
}

void StackAddrEscapeChecker::checkEndFunction(const ReturnStmt *RS,
                                              CheckerContext &Ctx) const {
  if (!ChecksEnabled[CK_StackAddrEscapeChecker])
    return;

  ExplodedNode *Node = Ctx.getPredecessor();

  bool ExitingTopFrame =
      Ctx.getPredecessor()->getLocationContext()->inTopFrame();

  if (ExitingTopFrame &&
      Node->getLocation().getTag() == ExprEngine::cleanupNodeTag() &&
      Node->getFirstPred()) {
    // When finishing the analysis of a top-level function, the engine
    // proactively removes dead symbols, which would prevent this checker from
    // looking through the output parameters. Take one step back, to the node
    // where these symbols and their bindings are still present.
    Node = Node->getFirstPred();
  }

  // Iterate over all bindings to global variables and see if any of them
  // contains a memory region in the stack space.
  class CallBack : public StoreManager::BindingsHandler {
  private:
    CheckerContext &Ctx;
    ProgramStateRef State;
    const StackFrameContext *PoppedFrame;
    const bool TopFrame;

    /// Look for stack variables referring to popped stack variables.
    /// Returns true only if it found some dangling stack variables
    /// referred to by another stack variable from a different stack frame.
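    ///
    /// A simplified sketch of the reported shapes (illustrative only):
    ///
    ///   void callee(int **Out) {
    ///     int Local = 0;
    ///     *Out = &Local;   // dangles once 'callee' returns
    ///   }
    ///   void caller() {
    ///     int *P;
    ///     callee(&P);      // 'P' now refers to popped stack memory
    ///   }
    ///
    /// With 'callee' inlined into 'caller', 'P' is a referrer from the parent
    /// stack frame; with 'callee' analyzed as a top-level function, the
    /// pointee of the output parameter 'Out' is the referrer.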
    bool checkForDanglingStackVariable(const MemRegion *Referrer,
                                       const MemRegion *Referred) {
      const auto *ReferrerMemSpace =
          getStackOrGlobalSpaceRegion(State, Referrer);
      const auto *ReferredMemSpace =
          Referred->getMemorySpaceAs<StackSpaceRegion>(State);

      if (!ReferrerMemSpace || !ReferredMemSpace)
        return false;

      const auto *ReferrerStackSpace =
          ReferrerMemSpace->getAs<StackSpaceRegion>();

      if (!ReferrerStackSpace)
        return false;

      if (const auto *ReferredFrame = ReferredMemSpace->getStackFrame();
          ReferredFrame != PoppedFrame) {
        return false;
      }

      if (ReferrerStackSpace->getStackFrame()->isParentOf(PoppedFrame)) {
        V.emplace_back(Referrer, Referred);
        return true;
      }
      if (isa<StackArgumentsSpaceRegion>(ReferrerMemSpace) &&
          // Not a simple ptr (int*) but something deeper, e.g. int**
          isa<SymbolicRegion>(Referrer->getBaseRegion()) &&
          ReferrerStackSpace->getStackFrame() == PoppedFrame && TopFrame) {
        // Output parameter of a top-level function
        V.emplace_back(Referrer, Referred);
        return true;
      }
      return false;
    }

    // Keep track of the variables that were invalidated through an opaque
    // function call. Even if the initial values of such variables were bound
    // to the address of a local variable, we cannot claim anything about them
    // now, at the function exit, so skip them to avoid false positives.
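    //
    // A simplified sketch of the excluded pattern (illustrative only;
    // 'opaque' is a placeholder for an unknown external function):
    //
    //   void opaque(int **PP);
    //   void f() {
    //     int Local = 0;
    //     int *P = &Local;
    //     opaque(&P);  // 'P' may have been reassigned; its value is now a
    //                  // conjured symbol, so no report is emitted for it.
    //   }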
    void recordInInvalidatedRegions(const MemRegion *Region) {
      if (isInvalidatedSymbolRegion(Region))
        ExcludedRegions.insert(getOriginBaseRegion(Region));
    }

  public:
    SmallVector<std::pair<const MemRegion *, const MemRegion *>, 10> V;
    // ExcludedRegions are skipped when reporting: if a referrer is in this
    // set, the related bug report is suppressed. This avoids false positives
    // for variables that were reset to a conjured value after an opaque
    // function call.
    llvm::SmallPtrSet<const MemRegion *, 4> ExcludedRegions;

    CallBack(CheckerContext &CC, bool TopFrame)
        : Ctx(CC), State(CC.getState()), PoppedFrame(CC.getStackFrame()),
          TopFrame(TopFrame) {}

    bool HandleBinding(StoreManager &SMgr, Store S, const MemRegion *Region,
                       SVal Val) override {
      recordInInvalidatedRegions(Region);
      const MemRegion *VR = Val.getAsRegion();
      if (!VR)
        return true;

      if (checkForDanglingStackVariable(Region, VR))
        return true;

      // Check bindings to global variables for the same problem.
      if (!isa_and_nonnull<GlobalsSpaceRegion>(
              getStackOrGlobalSpaceRegion(State, Region)))
        return true;

      if (VR) {
        if (const auto *S = VR->getMemorySpaceAs<StackSpaceRegion>(State);
            S && !isNotInCurrentFrame(S, Ctx)) {
          V.emplace_back(Region, VR);
        }
      }
      return true;
    }
  };

  CallBack Cb(Ctx, ExitingTopFrame);
  ProgramStateRef State = Node->getState();
  State->getStateManager().getStoreManager().iterBindings(State->getStore(),
                                                          Cb);

  if (Cb.V.empty())
    return;

  // Generate an error node.
  ExplodedNode *N = Ctx.generateNonFatalErrorNode(State, Node);
  if (!N)
    return;

  if (!BT_stackleak)
    BT_stackleak =
        std::make_unique<BugType>(CheckNames[CK_StackAddrEscapeChecker],
                                  "Stack address leaks outside of stack frame");

  for (const auto &P : Cb.V) {
    const MemRegion *Referrer = P.first->getBaseRegion();
    const MemRegion *Referred = P.second;
    if (Cb.ExcludedRegions.contains(getOriginBaseRegion(Referrer))) {
      continue;
    }

    // Generate a report for this bug.
    const StringRef CommonSuffix =
        " upon returning to the caller. This will be a dangling reference";
    SmallString<128> Buf;
    llvm::raw_svector_ostream Out(Buf);
    const SourceRange Range = genName(Out, Referred, Ctx.getASTContext());

    if (isa<CXXTempObjectRegion, CXXLifetimeExtendedObjectRegion>(Referrer)) {
      Out << " is still referred to by a temporary object on the stack"
          << CommonSuffix;
      auto Report = std::make_unique<PathSensitiveBugReport>(*BT_stackleak,
                                                             Out.str(), N);
      if (Range.isValid())
        Report->addRange(Range);
      Ctx.emitReport(std::move(Report));
      return;
    }

    auto ReferrerVariable = printReferrer(State, Referrer);
    if (!ReferrerVariable) {
      continue;
    }

    Out << " is still referred to by the " << *ReferrerVariable
        << CommonSuffix;
    auto Report =
        std::make_unique<PathSensitiveBugReport>(*BT_stackleak, Out.str(), N);
    if (Range.isValid())
      Report->addRange(Range);

    Ctx.emitReport(std::move(Report));
  }
}

void ento::registerStackAddrEscapeBase(CheckerManager &mgr) {
  mgr.registerChecker<StackAddrEscapeChecker>();
}

bool ento::shouldRegisterStackAddrEscapeBase(const CheckerManager &mgr) {
  return true;
}

#define REGISTER_CHECKER(name)                                                 \
  void ento::register##name(CheckerManager &Mgr) {                             \
    StackAddrEscapeChecker *Chk = Mgr.getChecker<StackAddrEscapeChecker>();    \
    Chk->ChecksEnabled[StackAddrEscapeChecker::CK_##name] = true;              \
    Chk->CheckNames[StackAddrEscapeChecker::CK_##name] =                       \
        Mgr.getCurrentCheckerName();                                           \
  }                                                                            \
                                                                               \
  bool ento::shouldRegister##name(const CheckerManager &mgr) { return true; }

REGISTER_CHECKER(StackAddrEscapeChecker)
REGISTER_CHECKER(StackAddrAsyncEscapeChecker)
