1//===- CoreEngine.cpp - Path-Sensitive Dataflow Engine --------------------===//
2//
3// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
4// See https://llvm.org/LICENSE.txt for license information.
5// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
6//
7//===----------------------------------------------------------------------===//
8//
9// This file defines a generic engine for intraprocedural, path-sensitive,
10// dataflow analysis via graph reachability engine.
11//
12//===----------------------------------------------------------------------===//
13
14#include "clang/StaticAnalyzer/Core/PathSensitive/CoreEngine.h"
15#include "PrettyStackTraceLocationContext.h"
16#include "clang/AST/Expr.h"
17#include "clang/AST/ExprCXX.h"
18#include "clang/AST/Stmt.h"
19#include "clang/AST/StmtCXX.h"
20#include "clang/Analysis/AnalysisDeclContext.h"
21#include "clang/Analysis/CFG.h"
22#include "clang/Analysis/ProgramPoint.h"
23#include "clang/Basic/LLVM.h"
24#include "clang/StaticAnalyzer/Core/AnalyzerOptions.h"
25#include "clang/StaticAnalyzer/Core/PathSensitive/BlockCounter.h"
26#include "clang/StaticAnalyzer/Core/PathSensitive/EntryPointStats.h"
27#include "clang/StaticAnalyzer/Core/PathSensitive/ExplodedGraph.h"
28#include "clang/StaticAnalyzer/Core/PathSensitive/ExprEngine.h"
29#include "clang/StaticAnalyzer/Core/PathSensitive/FunctionSummary.h"
30#include "clang/StaticAnalyzer/Core/PathSensitive/WorkList.h"
31#include "llvm/Support/ErrorHandling.h"
32#include "llvm/Support/FormatVariadic.h"
33#include "llvm/Support/TimeProfiler.h"
34#include <algorithm>
35#include <cassert>
36#include <memory>
37#include <optional>
38#include <utility>
39
40using namespace clang;
41using namespace ento;
42
#define DEBUG_TYPE "CoreEngine"

// Per-entry-point statistics (see EntryPointStats.h). STAT_COUNTER entries
// are compiled out in release builds without stats; ALWAYS_ENABLED_STATISTIC
// is collected unconditionally.
STAT_COUNTER(NumSteps, "The # of steps executed.");
STAT_COUNTER(NumSTUSteps, "The # of STU steps executed.");
STAT_COUNTER(NumCTUSteps, "The # of CTU steps executed.");
ALWAYS_ENABLED_STATISTIC(NumReachedMaxSteps,
                         "The # of times we reached the max number of steps.");
STAT_COUNTER(NumPathsExplored, "The # of paths explored by the analyzer.");
51
52//===----------------------------------------------------------------------===//
53// Core analysis engine.
54//===----------------------------------------------------------------------===//
55
56static std::unique_ptr<WorkList> generateWorkList(AnalyzerOptions &Opts) {
57 switch (Opts.getExplorationStrategy()) {
58 case ExplorationStrategyKind::DFS:
59 return WorkList::makeDFS();
60 case ExplorationStrategyKind::BFS:
61 return WorkList::makeBFS();
62 case ExplorationStrategyKind::BFSBlockDFSContents:
63 return WorkList::makeBFSBlockDFSContents();
64 case ExplorationStrategyKind::UnexploredFirst:
65 return WorkList::makeUnexploredFirst();
66 case ExplorationStrategyKind::UnexploredFirstQueue:
67 return WorkList::makeUnexploredFirstPriorityQueue();
68 case ExplorationStrategyKind::UnexploredFirstLocationQueue:
69 return WorkList::makeUnexploredFirstPriorityLocationQueue();
70 }
71 llvm_unreachable("Unknown AnalyzerOptions::ExplorationStrategyKind");
72}
73
// Construct the engine. A second, independent worklist is created for the
// cross-translation-unit (CTU) phase only when naive CTU analysis is enabled;
// otherwise CTUWList stays null and only the single-TU list is used.
CoreEngine::CoreEngine(ExprEngine &exprengine, FunctionSummariesTy *FS,
                       AnalyzerOptions &Opts)
    : ExprEng(exprengine), WList(generateWorkList(Opts)),
      CTUWList(Opts.IsNaiveCTUEnabled ? generateWorkList(Opts) : nullptr),
      BCounterFactory(G.getAllocator()), FunctionSummaries(FS) {}
79
80void CoreEngine::setBlockCounter(BlockCounter C) {
81 WList->setBlockCounter(C);
82 if (CTUWList)
83 CTUWList->setBlockCounter(C);
84}
85
86/// ExecuteWorkList - Run the worklist algorithm for a maximum number of steps.
87bool CoreEngine::ExecuteWorkList(const LocationContext *L, unsigned MaxSteps,
88 ProgramStateRef InitState) {
89 if (G.empty()) {
90 assert(!G.getRoot() && "empty graph must not have a root node");
91 // Initialize the analysis by constructing the root if there are no nodes.
92
93 const CFGBlock *Entry = &(L->getCFG()->getEntry());
94
95 assert(Entry->empty() && "Entry block must be empty.");
96
97 assert(Entry->succ_size() == 1 && "Entry block must have 1 successor.");
98
99 // Mark the entry block as visited.
100 FunctionSummaries->markVisitedBasicBlock(ID: Entry->getBlockID(),
101 D: L->getDecl(),
102 TotalIDs: L->getCFG()->getNumBlockIDs());
103
104 // Get the solitary successor.
105 const CFGBlock *Succ = *(Entry->succ_begin());
106
107 // Construct an edge representing the
108 // starting location in the function.
109 BlockEdge StartLoc(Entry, Succ, L);
110
111 // Set the current block counter to being empty.
112 setBlockCounter(BCounterFactory.GetEmptyCounter());
113
114 if (!InitState)
115 InitState = ExprEng.getInitialState(InitLoc: L);
116
117 bool IsNew;
118 ExplodedNode *Node = G.getNode(L: StartLoc, State: InitState, IsSink: false, IsNew: &IsNew);
119 assert(IsNew);
120 G.designateAsRoot(V: Node);
121
122 NodeBuilderContext BuilderCtx(*this, StartLoc.getDst(), Node);
123 ExplodedNodeSet DstBegin;
124 ExprEng.processBeginOfFunction(BC&: BuilderCtx, Pred: Node, Dst&: DstBegin, L: StartLoc);
125
126 enqueue(Set&: DstBegin);
127 }
128
129 // Check if we have a steps limit
130 bool UnlimitedSteps = MaxSteps == 0;
131
132 // Cap our pre-reservation in the event that the user specifies
133 // a very large number of maximum steps.
134 const unsigned PreReservationCap = 4000000;
135 if(!UnlimitedSteps)
136 G.reserve(NodeCount: std::min(a: MaxSteps, b: PreReservationCap));
137
138 auto ProcessWList = [this, UnlimitedSteps](unsigned MaxSteps) {
139 unsigned Steps = MaxSteps;
140 while (WList->hasWork()) {
141 if (!UnlimitedSteps) {
142 if (Steps == 0) {
143 NumReachedMaxSteps++;
144 break;
145 }
146 --Steps;
147 }
148
149 NumSteps++;
150
151 const WorkListUnit &WU = WList->dequeue();
152
153 // Set the current block counter.
154 setBlockCounter(WU.getBlockCounter());
155
156 // Retrieve the node.
157 ExplodedNode *Node = WU.getNode();
158
159 dispatchWorkItem(Pred: Node, Loc: Node->getLocation(), WU);
160 }
161 return MaxSteps - Steps;
162 };
163 const unsigned STUSteps = ProcessWList(MaxSteps);
164
165 if (CTUWList) {
166 NumSTUSteps += STUSteps;
167 const unsigned MinCTUSteps =
168 this->ExprEng.getAnalysisManager().options.CTUMaxNodesMin;
169 const unsigned Pct =
170 this->ExprEng.getAnalysisManager().options.CTUMaxNodesPercentage;
171 unsigned MaxCTUSteps = std::max(a: STUSteps * Pct / 100, b: MinCTUSteps);
172
173 WList = std::move(CTUWList);
174 const unsigned CTUSteps = ProcessWList(MaxCTUSteps);
175 NumCTUSteps += CTUSteps;
176 }
177
178 ExprEng.processEndWorklist();
179 return WList->hasWork();
180}
181
182static std::string timeTraceScopeName(const ProgramPoint &Loc) {
183 if (llvm::timeTraceProfilerEnabled()) {
184 return llvm::formatv(Fmt: "dispatchWorkItem {0}",
185 Vals: ProgramPoint::getProgramPointKindName(K: Loc.getKind()))
186 .str();
187 }
188 return "";
189}
190
191static llvm::TimeTraceMetadata timeTraceMetadata(const ExplodedNode *Pred,
192 const ProgramPoint &Loc) {
193 // If time-trace profiler is not enabled, this function is never called.
194 assert(llvm::timeTraceProfilerEnabled());
195 std::string Detail = "";
196 if (const auto SP = Loc.getAs<StmtPoint>()) {
197 if (const Stmt *S = SP->getStmt())
198 Detail = S->getStmtClassName();
199 }
200 auto SLoc = Loc.getSourceLocation();
201 if (!SLoc)
202 return llvm::TimeTraceMetadata{.Detail: std::move(Detail), .File: ""};
203 const auto &SM = Pred->getLocationContext()
204 ->getAnalysisDeclContext()
205 ->getASTContext()
206 .getSourceManager();
207 auto Line = SM.getPresumedLineNumber(Loc: *SLoc);
208 auto Fname = SM.getFilename(SpellingLoc: *SLoc);
209 return llvm::TimeTraceMetadata{.Detail: std::move(Detail), .File: Fname.str(),
210 .Line: static_cast<int>(Line)};
211}
212
213void CoreEngine::dispatchWorkItem(ExplodedNode *Pred, ProgramPoint Loc,
214 const WorkListUnit &WU) {
215 llvm::TimeTraceScope tcs{timeTraceScopeName(Loc), [Loc, Pred]() {
216 return timeTraceMetadata(Pred, Loc);
217 }};
218 PrettyStackTraceLocationContext CrashInfo(Pred->getLocationContext());
219 // Dispatch on the location type.
220 switch (Loc.getKind()) {
221 case ProgramPoint::BlockEdgeKind:
222 HandleBlockEdge(E: Loc.castAs<BlockEdge>(), Pred);
223 break;
224
225 case ProgramPoint::BlockEntranceKind:
226 HandleBlockEntrance(E: Loc.castAs<BlockEntrance>(), Pred);
227 break;
228
229 case ProgramPoint::BlockExitKind:
230 assert(false && "BlockExit location never occur in forward analysis.");
231 break;
232
233 case ProgramPoint::CallEnterKind:
234 HandleCallEnter(CE: Loc.castAs<CallEnter>(), Pred);
235 break;
236
237 case ProgramPoint::CallExitBeginKind:
238 ExprEng.processCallExit(Pred);
239 break;
240
241 case ProgramPoint::EpsilonKind: {
242 assert(Pred->hasSinglePred() &&
243 "Assume epsilon has exactly one predecessor by construction");
244 ExplodedNode *PNode = Pred->getFirstPred();
245 dispatchWorkItem(Pred, Loc: PNode->getLocation(), WU);
246 break;
247 }
248 default:
249 assert(Loc.getAs<PostStmt>() ||
250 Loc.getAs<PostInitializer>() ||
251 Loc.getAs<PostImplicitCall>() ||
252 Loc.getAs<CallExitEnd>() ||
253 Loc.getAs<LoopExit>() ||
254 Loc.getAs<PostAllocatorCall>());
255 HandlePostStmt(B: WU.getBlock(), StmtIdx: WU.getIndex(), Pred);
256 break;
257 }
258}
259
260void CoreEngine::HandleBlockEdge(const BlockEdge &L, ExplodedNode *Pred) {
261 const CFGBlock *Blk = L.getDst();
262 NodeBuilderContext BuilderCtx(*this, Blk, Pred);
263
264 // Mark this block as visited.
265 const LocationContext *LC = Pred->getLocationContext();
266 FunctionSummaries->markVisitedBasicBlock(ID: Blk->getBlockID(),
267 D: LC->getDecl(),
268 TotalIDs: LC->getCFG()->getNumBlockIDs());
269
270 // Display a prunable path note to the user if it's a virtual bases branch
271 // and we're taking the path that skips virtual base constructors.
272 if (L.getSrc()->getTerminator().isVirtualBaseBranch() &&
273 L.getDst() == *L.getSrc()->succ_begin()) {
274 ProgramPoint P = L.withTag(tag: getDataTags().make<NoteTag>(
275 ConstructorArgs: [](BugReporterContext &, PathSensitiveBugReport &) -> std::string {
276 // TODO: Just call out the name of the most derived class
277 // when we know it.
278 return "Virtual base initialization skipped because "
279 "it has already been handled by the most derived class";
280 },
281 /*IsPrunable=*/ConstructorArgs: true));
282 // Perform the transition.
283 ExplodedNodeSet Dst;
284 NodeBuilder Bldr(Pred, Dst, BuilderCtx);
285 Pred = Bldr.generateNode(PP: P, State: Pred->getState(), Pred);
286 if (!Pred)
287 return;
288 }
289
290 // Check if we are entering the EXIT block.
291 if (Blk == &(L.getLocationContext()->getCFG()->getExit())) {
292 assert(L.getLocationContext()->getCFG()->getExit().empty() &&
293 "EXIT block cannot contain Stmts.");
294
295 // Get return statement..
296 const ReturnStmt *RS = nullptr;
297 if (!L.getSrc()->empty()) {
298 CFGElement LastElement = L.getSrc()->back();
299 if (std::optional<CFGStmt> LastStmt = LastElement.getAs<CFGStmt>()) {
300 RS = dyn_cast<ReturnStmt>(Val: LastStmt->getStmt());
301 } else if (std::optional<CFGAutomaticObjDtor> AutoDtor =
302 LastElement.getAs<CFGAutomaticObjDtor>()) {
303 RS = dyn_cast<ReturnStmt>(Val: AutoDtor->getTriggerStmt());
304 }
305 }
306
307 ExplodedNodeSet CheckerNodes;
308 BlockEntrance BE(L.getSrc(), L.getDst(), Pred->getLocationContext());
309 ExprEng.runCheckersForBlockEntrance(BldCtx: BuilderCtx, Entrance: BE, Pred, Dst&: CheckerNodes);
310
311 // Process the final state transition.
312 for (ExplodedNode *P : CheckerNodes) {
313 ExprEng.processEndOfFunction(BC&: BuilderCtx, Pred: P, RS);
314 }
315
316 // This path is done. Don't enqueue any more nodes.
317 return;
318 }
319
320 // Call into the ExprEngine to process entering the CFGBlock.
321 BlockEntrance BE(L.getSrc(), L.getDst(), Pred->getLocationContext());
322 ExplodedNodeSet DstNodes;
323 NodeBuilder Builder(Pred, DstNodes, BuilderCtx);
324 ExprEng.processCFGBlockEntrance(L, BE, Builder, Pred);
325
326 // Auto-generate a node.
327 if (!Builder.hasGeneratedNodes()) {
328 Builder.generateNode(PP: BE, State: Pred->State, Pred);
329 }
330
331 ExplodedNodeSet CheckerNodes;
332 for (auto *N : DstNodes) {
333 ExprEng.runCheckersForBlockEntrance(BldCtx: BuilderCtx, Entrance: BE, Pred: N, Dst&: CheckerNodes);
334 }
335
336 // Enqueue nodes onto the worklist.
337 enqueue(Set&: CheckerNodes);
338}
339
340void CoreEngine::HandleBlockEntrance(const BlockEntrance &L,
341 ExplodedNode *Pred) {
342 // Increment the block counter.
343 const LocationContext *LC = Pred->getLocationContext();
344 unsigned BlockId = L.getBlock()->getBlockID();
345 BlockCounter Counter = WList->getBlockCounter();
346 Counter = BCounterFactory.IncrementCount(BC: Counter, CallSite: LC->getStackFrame(),
347 BlockID: BlockId);
348 setBlockCounter(Counter);
349
350 // Process the entrance of the block.
351 if (std::optional<CFGElement> E = L.getFirstElement()) {
352 NodeBuilderContext Ctx(*this, L.getBlock(), Pred);
353 ExprEng.processCFGElement(E: *E, Pred, StmtIdx: 0, Ctx: &Ctx);
354 } else
355 HandleBlockExit(B: L.getBlock(), Pred);
356}
357
358void CoreEngine::HandleBlockExit(const CFGBlock * B, ExplodedNode *Pred) {
359 if (const Stmt *Term = B->getTerminatorStmt()) {
360 switch (Term->getStmtClass()) {
361 default:
362 llvm_unreachable("Analysis for this terminator not implemented.");
363
364 case Stmt::CXXBindTemporaryExprClass:
365 HandleCleanupTemporaryBranch(
366 BTE: cast<CXXBindTemporaryExpr>(Val: Term), B, Pred);
367 return;
368
369 // Model static initializers.
370 case Stmt::DeclStmtClass:
371 HandleStaticInit(DS: cast<DeclStmt>(Val: Term), B, Pred);
372 return;
373
374 case Stmt::BinaryOperatorClass: // '&&' and '||'
375 HandleBranch(Cond: cast<BinaryOperator>(Val: Term)->getLHS(), Term, B, Pred);
376 return;
377
378 case Stmt::BinaryConditionalOperatorClass:
379 case Stmt::ConditionalOperatorClass:
380 HandleBranch(Cond: cast<AbstractConditionalOperator>(Val: Term)->getCond(),
381 Term, B, Pred);
382 return;
383
384 // FIXME: Use constant-folding in CFG construction to simplify this
385 // case.
386
387 case Stmt::ChooseExprClass:
388 HandleBranch(Cond: cast<ChooseExpr>(Val: Term)->getCond(), Term, B, Pred);
389 return;
390
391 case Stmt::CXXTryStmtClass:
392 // Generate a node for each of the successors.
393 // Our logic for EH analysis can certainly be improved.
394 for (CFGBlock::const_succ_iterator it = B->succ_begin(),
395 et = B->succ_end(); it != et; ++it) {
396 if (const CFGBlock *succ = *it) {
397 generateNode(Loc: BlockEdge(B, succ, Pred->getLocationContext()),
398 State: Pred->State, Pred);
399 }
400 }
401 return;
402
403 case Stmt::DoStmtClass:
404 HandleBranch(Cond: cast<DoStmt>(Val: Term)->getCond(), Term, B, Pred);
405 return;
406
407 case Stmt::CXXForRangeStmtClass:
408 HandleBranch(Cond: cast<CXXForRangeStmt>(Val: Term)->getCond(), Term, B, Pred);
409 return;
410
411 case Stmt::ForStmtClass:
412 HandleBranch(Cond: cast<ForStmt>(Val: Term)->getCond(), Term, B, Pred);
413 return;
414
415 case Stmt::SEHLeaveStmtClass:
416 case Stmt::ContinueStmtClass:
417 case Stmt::BreakStmtClass:
418 case Stmt::GotoStmtClass:
419 break;
420
421 case Stmt::IfStmtClass:
422 HandleBranch(Cond: cast<IfStmt>(Val: Term)->getCond(), Term, B, Pred);
423 return;
424
425 case Stmt::IndirectGotoStmtClass: {
426 // Only 1 successor: the indirect goto dispatch block.
427 assert(B->succ_size() == 1);
428 NodeBuilderContext Ctx(*this, B, Pred);
429 ExplodedNodeSet Dst;
430 IndirectGotoNodeBuilder Builder(
431 Dst, Ctx, cast<IndirectGotoStmt>(Val: Term)->getTarget(),
432 *(B->succ_begin()));
433
434 ExprEng.processIndirectGoto(Builder, Pred);
435 // Enqueue the new frontier onto the worklist.
436 enqueue(Set&: Dst);
437 return;
438 }
439
440 case Stmt::ObjCForCollectionStmtClass:
441 // In the case of ObjCForCollectionStmt, it appears twice in a CFG:
442 //
443 // (1) inside a basic block, which represents the binding of the
444 // 'element' variable to a value.
445 // (2) in a terminator, which represents the branch.
446 //
447 // For (1), ExprEngine will bind a value (i.e., 0 or 1) indicating
448 // whether or not collection contains any more elements. We cannot
449 // just test to see if the element is nil because a container can
450 // contain nil elements.
451 HandleBranch(Cond: Term, Term, B, Pred);
452 return;
453
454 case Stmt::SwitchStmtClass: {
455 NodeBuilderContext Ctx(*this, B, Pred);
456 ExplodedNodeSet Dst;
457 ExprEng.processSwitch(BC&: Ctx, Switch: cast<SwitchStmt>(Val: Term), Pred, Dst);
458 // Enqueue the new frontier onto the worklist.
459 enqueue(Set&: Dst);
460 return;
461 }
462
463 case Stmt::WhileStmtClass:
464 HandleBranch(Cond: cast<WhileStmt>(Val: Term)->getCond(), Term, B, Pred);
465 return;
466
467 case Stmt::GCCAsmStmtClass:
468 assert(cast<GCCAsmStmt>(Term)->isAsmGoto() && "Encountered GCCAsmStmt without labels");
469 // TODO: Handle jumping to labels
470 return;
471 }
472 }
473
474 if (B->getTerminator().isVirtualBaseBranch()) {
475 HandleVirtualBaseBranch(B, Pred);
476 return;
477 }
478
479 assert(B->succ_size() == 1 &&
480 "Blocks with no terminator should have at most 1 successor.");
481
482 generateNode(Loc: BlockEdge(B, *(B->succ_begin()), Pred->getLocationContext()),
483 State: Pred->State, Pred);
484}
485
486void CoreEngine::HandleCallEnter(const CallEnter &CE, ExplodedNode *Pred) {
487 NodeBuilderContext BuilderCtx(*this, CE.getEntry(), Pred);
488 ExprEng.processCallEnter(BC&: BuilderCtx, CE, Pred);
489}
490
491void CoreEngine::HandleBranch(const Stmt *Cond, const Stmt *Term,
492 const CFGBlock * B, ExplodedNode *Pred) {
493 assert(B->succ_size() == 2);
494 NodeBuilderContext Ctx(*this, B, Pred);
495 ExplodedNodeSet Dst;
496 ExprEng.processBranch(Condition: Cond, BuilderCtx&: Ctx, Pred, Dst, DstT: *(B->succ_begin()),
497 DstF: *(B->succ_begin() + 1),
498 IterationsCompletedInLoop: getCompletedIterationCount(B, Pred));
499 // Enqueue the new frontier onto the worklist.
500 enqueue(Set&: Dst);
501}
502
503void CoreEngine::HandleCleanupTemporaryBranch(const CXXBindTemporaryExpr *BTE,
504 const CFGBlock *B,
505 ExplodedNode *Pred) {
506 assert(B->succ_size() == 2);
507 NodeBuilderContext Ctx(*this, B, Pred);
508 ExplodedNodeSet Dst;
509 ExprEng.processCleanupTemporaryBranch(BTE, BldCtx&: Ctx, Pred, Dst, DstT: *(B->succ_begin()),
510 DstF: *(B->succ_begin() + 1));
511 // Enqueue the new frontier onto the worklist.
512 enqueue(Set&: Dst);
513}
514
515void CoreEngine::HandleStaticInit(const DeclStmt *DS, const CFGBlock *B,
516 ExplodedNode *Pred) {
517 assert(B->succ_size() == 2);
518 NodeBuilderContext Ctx(*this, B, Pred);
519 ExplodedNodeSet Dst;
520 ExprEng.processStaticInitializer(DS, BuilderCtx&: Ctx, Pred, Dst,
521 DstT: *(B->succ_begin()), DstF: *(B->succ_begin()+1));
522 // Enqueue the new frontier onto the worklist.
523 enqueue(Set&: Dst);
524}
525
526void CoreEngine::HandlePostStmt(const CFGBlock *B, unsigned StmtIdx,
527 ExplodedNode *Pred) {
528 assert(B);
529 assert(!B->empty());
530
531 // We no-op by skipping any FullExprCleanup
532 while (StmtIdx < B->size() &&
533 (*B)[StmtIdx].getKind() == CFGElement::FullExprCleanup) {
534 StmtIdx++;
535 }
536
537 if (StmtIdx == B->size())
538 HandleBlockExit(B, Pred);
539 else {
540 NodeBuilderContext Ctx(*this, B, Pred);
541 ExprEng.processCFGElement(E: (*B)[StmtIdx], Pred, StmtIdx, Ctx: &Ctx);
542 }
543}
544
545void CoreEngine::HandleVirtualBaseBranch(const CFGBlock *B,
546 ExplodedNode *Pred) {
547 const LocationContext *LCtx = Pred->getLocationContext();
548 if (const auto *CallerCtor = dyn_cast_or_null<CXXConstructExpr>(
549 Val: LCtx->getStackFrame()->getCallSite())) {
550 switch (CallerCtor->getConstructionKind()) {
551 case CXXConstructionKind::NonVirtualBase:
552 case CXXConstructionKind::VirtualBase: {
553 BlockEdge Loc(B, *B->succ_begin(), LCtx);
554 HandleBlockEdge(L: Loc, Pred);
555 return;
556 }
557 default:
558 break;
559 }
560 }
561
562 // We either don't see a parent stack frame because we're in the top frame,
563 // or the parent stack frame doesn't initialize our virtual bases.
564 BlockEdge Loc(B, *(B->succ_begin() + 1), LCtx);
565 HandleBlockEdge(L: Loc, Pred);
566}
567
568ExplodedNode *CoreEngine::makeNode(const ProgramPoint &Loc,
569 ProgramStateRef State, ExplodedNode *Pred,
570 bool MarkAsSink) const {
571 MarkAsSink = MarkAsSink || State->isPosteriorlyOverconstrained();
572
573 bool IsNew;
574 ExplodedNode *N = G.getNode(L: Loc, State, IsSink: MarkAsSink, IsNew: &IsNew);
575 N->addPredecessor(V: Pred, G);
576
577 return IsNew ? N : nullptr;
578}
579
580/// generateNode - Utility method to generate nodes, hook up successors,
581/// and add nodes to the worklist.
582/// TODO: This and other similar methods should call CoreEngine::makeNode()
583/// instead of duplicating its logic. This would also fix that currently these
584/// can generate non-sink nodes with PosteriorlyOverconstrained state.
585void CoreEngine::generateNode(const ProgramPoint &Loc,
586 ProgramStateRef State,
587 ExplodedNode *Pred) {
588 assert(Pred);
589 bool IsNew;
590 ExplodedNode *Node = G.getNode(L: Loc, State, IsSink: false, IsNew: &IsNew);
591
592 Node->addPredecessor(V: Pred, G); // Link 'Node' with its predecessor.
593
594 // Only add 'Node' to the worklist if it was freshly generated.
595 if (IsNew) WList->enqueue(N: Node);
596}
597
598void CoreEngine::enqueueStmtNode(ExplodedNode *N,
599 const CFGBlock *Block, unsigned Idx) {
600 assert(Block);
601 assert(!N->isSink());
602
603 // Check if this node entered a callee.
604 if (N->getLocation().getAs<CallEnter>()) {
605 // Still use the index of the CallExpr. It's needed to create the callee
606 // StackFrameContext.
607 WList->enqueue(N, B: Block, idx: Idx);
608 return;
609 }
610
611 // Do not create extra nodes. Move to the next CFG element.
612 if (N->getLocation().getAs<PostInitializer>() ||
613 N->getLocation().getAs<PostImplicitCall>()||
614 N->getLocation().getAs<LoopExit>()) {
615 WList->enqueue(N, B: Block, idx: Idx+1);
616 return;
617 }
618
619 if (N->getLocation().getAs<EpsilonPoint>()) {
620 WList->enqueue(N, B: Block, idx: Idx);
621 return;
622 }
623
624 if ((*Block)[Idx].getKind() == CFGElement::NewAllocator) {
625 WList->enqueue(N, B: Block, idx: Idx+1);
626 return;
627 }
628
629 // At this point, we know we're processing a normal statement.
630 CFGStmt CS = (*Block)[Idx].castAs<CFGStmt>();
631 PostStmt Loc(CS.getStmt(), N->getLocationContext());
632
633 if (Loc == N->getLocation().withTag(tag: nullptr)) {
634 // Note: 'N' should be a fresh node because otherwise it shouldn't be
635 // a member of Deferred.
636 WList->enqueue(N, B: Block, idx: Idx+1);
637 return;
638 }
639
640 bool IsNew;
641 ExplodedNode *Succ = G.getNode(L: Loc, State: N->getState(), IsSink: false, IsNew: &IsNew);
642 Succ->addPredecessor(V: N, G);
643
644 if (IsNew)
645 WList->enqueue(N: Succ, B: Block, idx: Idx+1);
646}
647
648ExplodedNode *CoreEngine::generateCallExitBeginNode(ExplodedNode *N,
649 const ReturnStmt *RS) {
650 // Create a CallExitBegin node and enqueue it.
651 const auto *LocCtx = cast<StackFrameContext>(Val: N->getLocationContext());
652
653 // Use the callee location context.
654 CallExitBegin Loc(LocCtx, RS);
655
656 bool isNew;
657 ExplodedNode *Node = G.getNode(L: Loc, State: N->getState(), IsSink: false, IsNew: &isNew);
658 Node->addPredecessor(V: N, G);
659 return isNew ? Node : nullptr;
660}
661
662std::optional<unsigned>
663CoreEngine::getCompletedIterationCount(const CFGBlock *B,
664 ExplodedNode *Pred) const {
665 const LocationContext *LC = Pred->getLocationContext();
666 BlockCounter Counter = WList->getBlockCounter();
667 unsigned BlockCount =
668 Counter.getNumVisited(CallSite: LC->getStackFrame(), BlockID: B->getBlockID());
669
670 const Stmt *Term = B->getTerminatorStmt();
671 if (isa<ForStmt, WhileStmt, CXXForRangeStmt>(Val: Term)) {
672 assert(BlockCount >= 1 &&
673 "Block count of currently analyzed block must be >= 1");
674 return BlockCount - 1;
675 }
676 if (isa<DoStmt>(Val: Term)) {
677 // In a do-while loop one iteration happens before the first evaluation of
678 // the loop condition, so we don't subtract one.
679 return BlockCount;
680 }
681 // ObjCForCollectionStmt is skipped intentionally because the current
682 // application of the iteration counts is not relevant for it.
683 return std::nullopt;
684}
685
686void CoreEngine::enqueue(ExplodedNodeSet &Set) {
687 for (const auto I : Set)
688 WList->enqueue(N: I);
689}
690
691void CoreEngine::enqueue(ExplodedNodeSet &Set,
692 const CFGBlock *Block, unsigned Idx) {
693 for (const auto I : Set)
694 enqueueStmtNode(N: I, Block, Idx);
695}
696
697void CoreEngine::enqueueEndOfFunction(ExplodedNodeSet &Set, const ReturnStmt *RS) {
698 for (auto *I : Set) {
699 // If we are in an inlined call, generate CallExitBegin node.
700 if (I->getLocationContext()->getParent()) {
701 I = generateCallExitBeginNode(N: I, RS);
702 if (I)
703 WList->enqueue(N: I);
704 } else {
705 // TODO: We should run remove dead bindings here.
706 G.addEndOfPath(V: I);
707 NumPathsExplored++;
708 }
709 }
710}
711
712ExplodedNode *NodeBuilder::generateNode(const ProgramPoint &Loc,
713 ProgramStateRef State,
714 ExplodedNode *FromN, bool MarkAsSink) {
715 HasGeneratedNodes = true;
716 Frontier.erase(N: FromN);
717 ExplodedNode *N = C.getEngine().makeNode(Loc, State, Pred: FromN, MarkAsSink);
718
719 Frontier.Add(N);
720
721 return N;
722}
723
724ExplodedNode *BranchNodeBuilder::generateNode(ProgramStateRef State,
725 bool Branch,
726 ExplodedNode *NodePred) {
727 const CFGBlock *Dst = Branch ? DstT : DstF;
728
729 if (!Dst)
730 return nullptr;
731
732 ProgramPoint Loc =
733 BlockEdge(C.getBlock(), Dst, NodePred->getLocationContext());
734 ExplodedNode *Succ = NodeBuilder::generateNode(Loc, State, FromN: NodePred);
735 return Succ;
736}
737
738ExplodedNode *IndirectGotoNodeBuilder::generateNode(const CFGBlock *Block,
739 ProgramStateRef St,
740 ExplodedNode *Pred) {
741 BlockEdge BE(C.getBlock(), Block, Pred->getLocationContext());
742 return generateNode(PP: BE, State: St, Pred);
743}
744
745ExplodedNode *SwitchNodeBuilder::generateCaseStmtNode(const CFGBlock *Block,
746 ProgramStateRef St,
747 ExplodedNode *Pred) {
748 BlockEdge BE(C.getBlock(), Block, Pred->getLocationContext());
749 return generateNode(Loc: BE, State: St, FromN: Pred);
750}
751
752ExplodedNode *SwitchNodeBuilder::generateDefaultCaseNode(ProgramStateRef St,
753 ExplodedNode *Pred) {
754 // Get the block for the default case.
755 const CFGBlock *Src = C.getBlock();
756 assert(Src->succ_rbegin() != Src->succ_rend());
757 CFGBlock *DefaultBlock = *Src->succ_rbegin();
758
759 // Basic correctness check for default blocks that are unreachable and not
760 // caught by earlier stages.
761 if (!DefaultBlock)
762 return nullptr;
763
764 BlockEdge BE(Src, DefaultBlock, Pred->getLocationContext());
765 return generateNode(Loc: BE, State: St, FromN: Pred);
766}
767