1//===- CoreEngine.cpp - Path-Sensitive Dataflow Engine --------------------===//
2//
3// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
4// See https://llvm.org/LICENSE.txt for license information.
5// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
6//
7//===----------------------------------------------------------------------===//
8//
9// This file defines a generic engine for intraprocedural, path-sensitive,
10// dataflow analysis via graph reachability engine.
11//
12//===----------------------------------------------------------------------===//
13
14#include "clang/StaticAnalyzer/Core/PathSensitive/CoreEngine.h"
15#include "PrettyStackTraceLocationContext.h"
16#include "clang/AST/Expr.h"
17#include "clang/AST/ExprCXX.h"
18#include "clang/AST/Stmt.h"
19#include "clang/AST/StmtCXX.h"
20#include "clang/Analysis/AnalysisDeclContext.h"
21#include "clang/Analysis/CFG.h"
22#include "clang/Analysis/ProgramPoint.h"
23#include "clang/Basic/LLVM.h"
24#include "clang/StaticAnalyzer/Core/AnalyzerOptions.h"
25#include "clang/StaticAnalyzer/Core/PathSensitive/BlockCounter.h"
26#include "clang/StaticAnalyzer/Core/PathSensitive/EntryPointStats.h"
27#include "clang/StaticAnalyzer/Core/PathSensitive/ExplodedGraph.h"
28#include "clang/StaticAnalyzer/Core/PathSensitive/ExprEngine.h"
29#include "clang/StaticAnalyzer/Core/PathSensitive/FunctionSummary.h"
30#include "clang/StaticAnalyzer/Core/PathSensitive/WorkList.h"
31#include "llvm/Support/ErrorHandling.h"
32#include "llvm/Support/FormatVariadic.h"
33#include "llvm/Support/TimeProfiler.h"
34#include <algorithm>
35#include <cassert>
36#include <memory>
37#include <optional>
38#include <utility>
39
40using namespace clang;
41using namespace ento;
42
#define DEBUG_TYPE "CoreEngine"

// Statistics describing how much exploration the engine performed.
STAT_COUNTER(NumSteps, "The # of steps executed.");
STAT_COUNTER(NumSTUSteps, "The # of STU steps executed.");
STAT_COUNTER(NumCTUSteps, "The # of CTU steps executed.");
// Collected even in builds where ordinary LLVM statistics are compiled out
// (see ALWAYS_ENABLED_STATISTIC).
ALWAYS_ENABLED_STATISTIC(NumReachedMaxSteps,
                         "The # of times we reached the max number of steps.");
STAT_COUNTER(NumPathsExplored, "The # of paths explored by the analyzer.");
51
52//===----------------------------------------------------------------------===//
53// Core analysis engine.
54//===----------------------------------------------------------------------===//
55
56static std::unique_ptr<WorkList> generateWorkList(AnalyzerOptions &Opts) {
57 switch (Opts.getExplorationStrategy()) {
58 case ExplorationStrategyKind::DFS:
59 return WorkList::makeDFS();
60 case ExplorationStrategyKind::BFS:
61 return WorkList::makeBFS();
62 case ExplorationStrategyKind::BFSBlockDFSContents:
63 return WorkList::makeBFSBlockDFSContents();
64 case ExplorationStrategyKind::UnexploredFirst:
65 return WorkList::makeUnexploredFirst();
66 case ExplorationStrategyKind::UnexploredFirstQueue:
67 return WorkList::makeUnexploredFirstPriorityQueue();
68 case ExplorationStrategyKind::UnexploredFirstLocationQueue:
69 return WorkList::makeUnexploredFirstPriorityLocationQueue();
70 }
71 llvm_unreachable("Unknown AnalyzerOptions::ExplorationStrategyKind");
72}
73
/// Construct the engine. The single-TU worklist is always created; a second
/// worklist for naive cross-translation-unit (CTU) exploration is created
/// only when IsNaiveCTUEnabled is set in the options. The block-counter
/// factory allocates from the exploded graph's allocator.
CoreEngine::CoreEngine(ExprEngine &exprengine, FunctionSummariesTy *FS,
                       AnalyzerOptions &Opts)
    : ExprEng(exprengine), WList(generateWorkList(Opts)),
      CTUWList(Opts.IsNaiveCTUEnabled ? generateWorkList(Opts) : nullptr),
      BCounterFactory(G.getAllocator()), FunctionSummaries(FS) {}
79
80void CoreEngine::setBlockCounter(BlockCounter C) {
81 WList->setBlockCounter(C);
82 if (CTUWList)
83 CTUWList->setBlockCounter(C);
84}
85
/// ExecuteWorkList - Run the worklist algorithm for a maximum number of steps.
///
/// On an empty graph this first builds the root node for \p L's entry block
/// and seeds the worklist via processBeginOfFunction. A \p MaxSteps of 0
/// means "no step limit". Returns true iff work remained when we stopped,
/// i.e. the step budget ran out before the worklist drained.
bool CoreEngine::ExecuteWorkList(const LocationContext *L, unsigned MaxSteps,
                                 ProgramStateRef InitState) {
  if (G.empty()) {
    assert(!G.getRoot() && "empty graph must not have a root node");
    // Initialize the analysis by constructing the root if there are no nodes.

    const CFGBlock *Entry = &(L->getCFG()->getEntry());

    assert(Entry->empty() && "Entry block must be empty.");

    assert(Entry->succ_size() == 1 && "Entry block must have 1 successor.");

    // Mark the entry block as visited.
    FunctionSummaries->markVisitedBasicBlock(ID: Entry->getBlockID(),
                                             D: L->getDecl(),
                                             TotalIDs: L->getCFG()->getNumBlockIDs());

    // Get the solitary successor.
    const CFGBlock *Succ = *(Entry->succ_begin());

    // Construct an edge representing the
    // starting location in the function.
    BlockEdge StartLoc(Entry, Succ, L);

    // Set the current block counter to being empty.
    setBlockCounter(BCounterFactory.GetEmptyCounter());

    if (!InitState)
      InitState = ExprEng.getInitialState(InitLoc: L);

    bool IsNew;
    ExplodedNode *Node = G.getNode(L: StartLoc, State: InitState, IsSink: false, IsNew: &IsNew);
    assert(IsNew);
    G.designateAsRoot(V: Node);

    ExprEng.setCurrLocationContextAndBlock(LC: Node->getLocationContext(), B: Succ);

    ExplodedNodeSet DstBegin;
    ExprEng.processBeginOfFunction(Pred: Node, Dst&: DstBegin, L: StartLoc);

    enqueue(Set&: DstBegin);
  }

  // Check if we have a steps limit
  bool UnlimitedSteps = MaxSteps == 0;

  // Cap our pre-reservation in the event that the user specifies
  // a very large number of maximum steps.
  const unsigned PreReservationCap = 4000000;
  if(!UnlimitedSteps)
    G.reserve(NodeCount: std::min(a: MaxSteps, b: PreReservationCap));

  // Drain the current worklist, consuming at most MaxSteps steps (the limit
  // is ignored when UnlimitedSteps is set). Returns the number of steps
  // actually consumed.
  auto ProcessWList = [this, UnlimitedSteps](unsigned MaxSteps) {
    unsigned Steps = MaxSteps;
    while (WList->hasWork()) {
      if (!UnlimitedSteps) {
        if (Steps == 0) {
          NumReachedMaxSteps++;
          break;
        }
        --Steps;
      }

      NumSteps++;

      const WorkListUnit &WU = WList->dequeue();

      // Set the current block counter.
      setBlockCounter(WU.getBlockCounter());

      // Retrieve the node.
      ExplodedNode *Node = WU.getNode();

      dispatchWorkItem(Pred: Node, Loc: Node->getLocation(), WU);
    }
    return MaxSteps - Steps;
  };
  // Phase 1: explore within the current translation unit.
  const unsigned STUSteps = ProcessWList(MaxSteps);

  if (CTUWList) {
    NumSTUSteps += STUSteps;
    // Phase 2 (naive CTU): give the cross-TU worklist a budget derived from
    // the steps spent in phase 1 — a configured percentage of STUSteps, but
    // never less than the configured minimum.
    const unsigned MinCTUSteps =
        this->ExprEng.getAnalysisManager().options.CTUMaxNodesMin;
    const unsigned Pct =
        this->ExprEng.getAnalysisManager().options.CTUMaxNodesPercentage;
    unsigned MaxCTUSteps = std::max(a: STUSteps * Pct / 100, b: MinCTUSteps);

    // Swap in the CTU worklist and drain it under the derived budget. Note
    // that WList refers to the CTU list from here on.
    WList = std::move(CTUWList);
    const unsigned CTUSteps = ProcessWList(MaxCTUSteps);
    NumCTUSteps += CTUSteps;
  }

  ExprEng.processEndWorklist();
  return WList->hasWork();
}
182
183static std::string timeTraceScopeName(const ProgramPoint &Loc) {
184 if (llvm::timeTraceProfilerEnabled()) {
185 return llvm::formatv(Fmt: "dispatchWorkItem {0}",
186 Vals: ProgramPoint::getProgramPointKindName(K: Loc.getKind()))
187 .str();
188 }
189 return "";
190}
191
/// Build the metadata (statement class name, file, line) attached to the
/// time-trace entry of a single work item.
static llvm::TimeTraceMetadata timeTraceMetadata(const ExplodedNode *Pred,
                                                 const ProgramPoint &Loc) {
  // If time-trace profiler is not enabled, this function is never called.
  assert(llvm::timeTraceProfilerEnabled());
  std::string Detail = "";
  // For statement program points, record the statement's class name.
  if (const auto SP = Loc.getAs<StmtPoint>()) {
    if (const Stmt *S = SP->getStmt())
      Detail = S->getStmtClassName();
  }
  // Without a source location only the detail string can be reported.
  auto SLoc = Loc.getSourceLocation();
  if (!SLoc)
    return llvm::TimeTraceMetadata{.Detail: std::move(Detail), .File: ""};
  // Resolve file and line through the AST context of the node's location.
  const auto &SM = Pred->getLocationContext()
                       ->getAnalysisDeclContext()
                       ->getASTContext()
                       .getSourceManager();
  auto Line = SM.getPresumedLineNumber(Loc: *SLoc);
  auto Fname = SM.getFilename(SpellingLoc: *SLoc);
  return llvm::TimeTraceMetadata{.Detail: std::move(Detail), .File: Fname.str(),
                                 .Line: static_cast<int>(Line)};
}
213
/// Process one dequeued work item: install crash diagnostics and optional
/// time-trace instrumentation, then dispatch on the kind of the node's
/// program point to the matching handler.
void CoreEngine::dispatchWorkItem(ExplodedNode *Pred, ProgramPoint Loc,
                                  const WorkListUnit &WU) {
  // The metadata lambda only runs when the time-trace profiler is enabled.
  llvm::TimeTraceScope tcs{timeTraceScopeName(Loc), [Loc, Pred]() {
                             return timeTraceMetadata(Pred, Loc);
                           }};
  PrettyStackTraceLocationContext CrashInfo(Pred->getLocationContext());

  // This work item is not necessarily related to the previous one, so
  // the old current LocationContext and Block is no longer relevant.
  // The new current LocationContext and Block should be set soon, but this
  // guarantees that buggy access before that will trigger loud crashes instead
  // of silently using stale data.
  ExprEng.resetCurrLocationContextAndBlock();

  // Dispatch on the location type.
  switch (Loc.getKind()) {
  case ProgramPoint::BlockEdgeKind:
    HandleBlockEdge(E: Loc.castAs<BlockEdge>(), Pred);
    break;

  case ProgramPoint::BlockEntranceKind:
    HandleBlockEntrance(E: Loc.castAs<BlockEntrance>(), Pred);
    break;

  case ProgramPoint::BlockExitKind:
    assert(false && "BlockExit location never occur in forward analysis.");
    break;

  case ProgramPoint::CallEnterKind:
    HandleCallEnter(CE: Loc.castAs<CallEnter>(), Pred);
    break;

  case ProgramPoint::CallExitBeginKind:
    ExprEng.processCallExit(Pred);
    break;

  case ProgramPoint::EpsilonKind: {
    assert(Pred->hasSinglePred() &&
           "Assume epsilon has exactly one predecessor by construction");
    // Epsilon points are pass-through: re-dispatch the same node using its
    // sole predecessor's program point.
    ExplodedNode *PNode = Pred->getFirstPred();
    dispatchWorkItem(Pred, Loc: PNode->getLocation(), WU);
    break;
  }
  default:
    // Every remaining kind marks completion of a CFG element; continue with
    // the next element of the block recorded in the work-list unit.
    assert(Loc.getAs<PostStmt>() ||
           Loc.getAs<PostInitializer>() ||
           Loc.getAs<PostImplicitCall>() ||
           Loc.getAs<CallExitEnd>() ||
           Loc.getAs<LoopExit>() ||
           Loc.getAs<PostAllocatorCall>());
    HandlePostStmt(B: WU.getBlock(), StmtIdx: WU.getIndex(), Pred);
    break;
  }
}
268
/// Process the transition along a CFG edge: mark the destination block as
/// visited, emit a path note when skipping virtual base initializers, and
/// either end the path (EXIT block) or enter the destination block.
void CoreEngine::HandleBlockEdge(const BlockEdge &L, ExplodedNode *Pred) {
  const CFGBlock *Blk = L.getDst();
  ExprEng.setCurrLocationContextAndBlock(LC: Pred->getLocationContext(), B: Blk);

  // Mark this block as visited.
  const LocationContext *LC = Pred->getLocationContext();
  FunctionSummaries->markVisitedBasicBlock(ID: Blk->getBlockID(),
                                           D: LC->getDecl(),
                                           TotalIDs: LC->getCFG()->getNumBlockIDs());

  // Display a prunable path note to the user if it's a virtual bases branch
  // and we're taking the path that skips virtual base constructors.
  if (L.getSrc()->getTerminator().isVirtualBaseBranch() &&
      L.getDst() == *L.getSrc()->succ_begin()) {
    ProgramPoint P = L.withTag(tag: getDataTags().make<NoteTag>(
        ConstructorArgs: [](BugReporterContext &, PathSensitiveBugReport &) -> std::string {
          // TODO: Just call out the name of the most derived class
          // when we know it.
          return "Virtual base initialization skipped because "
                 "it has already been handled by the most derived class";
        },
        /*IsPrunable=*/ConstructorArgs: true));
    // Perform the transition. A null result means an identical tagged node
    // already existed, so this path has been explored before: stop here.
    Pred = makeNode(Loc: P, State: Pred->getState(), Pred);
    if (!Pred)
      return;
  }

  // Check if we are entering the EXIT block.
  const CFGBlock &ExitBlk = L.getLocationContext()->getCFG()->getExit();
  if (Blk == &ExitBlk) {
    assert(ExitBlk.empty() && "EXIT block cannot contain Stmts.");

    // Find the return statement, if any: it is either the last CFG element
    // of the source block or the trigger of its trailing automatic-object
    // destructor element.
    const ReturnStmt *RS = nullptr;
    if (!L.getSrc()->empty()) {
      CFGElement LastElement = L.getSrc()->back();
      if (std::optional<CFGStmt> LastStmt = LastElement.getAs<CFGStmt>()) {
        RS = dyn_cast<ReturnStmt>(Val: LastStmt->getStmt());
      } else if (std::optional<CFGAutomaticObjDtor> AutoDtor =
                     LastElement.getAs<CFGAutomaticObjDtor>()) {
        RS = dyn_cast<ReturnStmt>(Val: AutoDtor->getTriggerStmt());
      }
    }

    // Run block-entrance checkers even for the EXIT block before ending
    // the function.
    ExplodedNodeSet CheckerNodes;
    BlockEntrance BE(L.getSrc(), L.getDst(), Pred->getLocationContext());
    ExprEng.runCheckersForBlockEntrance(Entrance: BE, Pred, Dst&: CheckerNodes);

    // Process the final state transition.
    for (ExplodedNode *P : CheckerNodes) {
      ExprEng.processEndOfFunction(Pred: P, RS);
    }

    // This path is done. Don't enqueue any more nodes.
    return;
  }

  // Call into the ExprEngine to process entering the CFGBlock.
  BlockEntrance BE(L.getSrc(), L.getDst(), Pred->getLocationContext());
  ExplodedNodeSet DstNodes;
  NodeBuilder Builder(Pred, DstNodes, ExprEng.getBuilderContext());
  ExprEng.processCFGBlockEntrance(L, BE, Builder, Pred);

  // Auto-generate a node: if ExprEngine produced no nodes, create one that
  // carries the predecessor's state so the traversal continues.
  if (!Builder.hasGeneratedNodes()) {
    Builder.generateNode(PP: BE, State: Pred->State, Pred);
  }

  // Run block-entrance checkers on each generated node.
  ExplodedNodeSet CheckerNodes;
  for (auto *N : DstNodes) {
    ExprEng.runCheckersForBlockEntrance(Entrance: BE, Pred: N, Dst&: CheckerNodes);
  }

  // Enqueue nodes onto the worklist.
  enqueue(Set&: CheckerNodes);
}
346
347void CoreEngine::HandleBlockEntrance(const BlockEntrance &L,
348 ExplodedNode *Pred) {
349 // Increment the block counter.
350 const LocationContext *LC = Pred->getLocationContext();
351 unsigned BlockId = L.getBlock()->getBlockID();
352 BlockCounter Counter = WList->getBlockCounter();
353 Counter = BCounterFactory.IncrementCount(BC: Counter, CallSite: LC->getStackFrame(),
354 BlockID: BlockId);
355 setBlockCounter(Counter);
356
357 // Process the entrance of the block.
358 if (std::optional<CFGElement> E = L.getFirstElement()) {
359 ExprEng.setCurrLocationContextAndBlock(LC: Pred->getLocationContext(),
360 B: L.getBlock());
361 ExprEng.processCFGElement(E: *E, Pred, StmtIdx: 0);
362 } else
363 HandleBlockExit(B: L.getBlock(), Pred);
364}
365
/// Process the end of block \p B: dispatch on the terminator statement to
/// create the outgoing transitions. Terminator-less blocks and virtual-base
/// branches are handled after the switch; the terminator kinds that merely
/// `break` fall through to the single-successor case at the bottom.
void CoreEngine::HandleBlockExit(const CFGBlock * B, ExplodedNode *Pred) {
  if (const Stmt *Term = B->getTerminatorStmt()) {
    ExprEng.setCurrLocationContextAndBlock(LC: Pred->getLocationContext(), B);

    switch (Term->getStmtClass()) {
    default:
      llvm_unreachable("Analysis for this terminator not implemented.");

    case Stmt::CXXBindTemporaryExprClass:
      HandleCleanupTemporaryBranch(
          BTE: cast<CXXBindTemporaryExpr>(Val: Term), B, Pred);
      return;

    // Model static initializers.
    case Stmt::DeclStmtClass:
      HandleStaticInit(DS: cast<DeclStmt>(Val: Term), B, Pred);
      return;

    case Stmt::BinaryOperatorClass: // '&&' and '||'
      HandleBranch(Cond: cast<BinaryOperator>(Val: Term)->getLHS(), Term, B, Pred);
      return;

    case Stmt::BinaryConditionalOperatorClass:
    case Stmt::ConditionalOperatorClass:
      HandleBranch(Cond: cast<AbstractConditionalOperator>(Val: Term)->getCond(),
                   Term, B, Pred);
      return;

    // FIXME: Use constant-folding in CFG construction to simplify this
    // case.

    case Stmt::ChooseExprClass:
      HandleBranch(Cond: cast<ChooseExpr>(Val: Term)->getCond(), Term, B, Pred);
      return;

    case Stmt::CXXTryStmtClass:
      // Generate a node for each of the successors.
      // Our logic for EH analysis can certainly be improved.
      for (const CFGBlock *Succ : B->succs()) {
        if (Succ) {
          BlockEdge BE(B, Succ, Pred->getLocationContext());
          if (ExplodedNode *N = makeNode(Loc: BE, State: Pred->State, Pred))
            WList->enqueue(N);
        }
      }
      return;

    case Stmt::DoStmtClass:
      HandleBranch(Cond: cast<DoStmt>(Val: Term)->getCond(), Term, B, Pred);
      return;

    case Stmt::CXXForRangeStmtClass:
      HandleBranch(Cond: cast<CXXForRangeStmt>(Val: Term)->getCond(), Term, B, Pred);
      return;

    case Stmt::ForStmtClass:
      HandleBranch(Cond: cast<ForStmt>(Val: Term)->getCond(), Term, B, Pred);
      return;

    // Unconditional jumps: the CFG edge already encodes the target, so fall
    // through to the single-successor transition below.
    case Stmt::SEHLeaveStmtClass:
    case Stmt::ContinueStmtClass:
    case Stmt::BreakStmtClass:
    case Stmt::GotoStmtClass:
      break;

    case Stmt::IfStmtClass:
      HandleBranch(Cond: cast<IfStmt>(Val: Term)->getCond(), Term, B, Pred);
      return;

    case Stmt::IndirectGotoStmtClass: {
      // Only 1 successor: the indirect goto dispatch block.
      assert(B->succ_size() == 1);
      ExplodedNodeSet Dst;
      IndirectGotoNodeBuilder Builder(
          Dst, ExprEng.getBuilderContext(),
          cast<IndirectGotoStmt>(Val: Term)->getTarget(), *(B->succ_begin()));

      ExprEng.processIndirectGoto(Builder, Pred);
      // Enqueue the new frontier onto the worklist.
      enqueue(Set&: Dst);
      return;
    }

    case Stmt::ObjCForCollectionStmtClass:
      // In the case of ObjCForCollectionStmt, it appears twice in a CFG:
      //
      // (1) inside a basic block, which represents the binding of the
      // 'element' variable to a value.
      // (2) in a terminator, which represents the branch.
      //
      // For (1), ExprEngine will bind a value (i.e., 0 or 1) indicating
      // whether or not collection contains any more elements. We cannot
      // just test to see if the element is nil because a container can
      // contain nil elements.
      HandleBranch(Cond: Term, Term, B, Pred);
      return;

    case Stmt::SwitchStmtClass: {
      ExplodedNodeSet Dst;
      ExprEng.processSwitch(Switch: cast<SwitchStmt>(Val: Term), Pred, Dst);
      // Enqueue the new frontier onto the worklist.
      enqueue(Set&: Dst);
      return;
    }

    case Stmt::WhileStmtClass:
      HandleBranch(Cond: cast<WhileStmt>(Val: Term)->getCond(), Term, B, Pred);
      return;

    case Stmt::GCCAsmStmtClass:
      assert(cast<GCCAsmStmt>(Term)->isAsmGoto() && "Encountered GCCAsmStmt without labels");
      // TODO: Handle jumping to labels
      return;
    }
  }

  if (B->getTerminator().isVirtualBaseBranch()) {
    HandleVirtualBaseBranch(B, Pred);
    return;
  }

  assert(B->succ_size() == 1 &&
         "Blocks with no terminator should have at most 1 successor.");

  // Unconditional transition to the single successor.
  BlockEdge BE(B, *(B->succ_begin()), Pred->getLocationContext());
  if (ExplodedNode *N = makeNode(Loc: BE, State: Pred->State, Pred))
    WList->enqueue(N);
}
494
495void CoreEngine::HandleCallEnter(const CallEnter &CE, ExplodedNode *Pred) {
496 ExprEng.setCurrLocationContextAndBlock(LC: Pred->getLocationContext(),
497 B: CE.getEntry());
498 ExprEng.processCallEnter(CE, Pred);
499}
500
501void CoreEngine::HandleBranch(const Stmt *Cond, const Stmt *Term,
502 const CFGBlock * B, ExplodedNode *Pred) {
503 assert(B->succ_size() == 2);
504 ExplodedNodeSet Dst;
505 ExprEng.processBranch(Condition: Cond, Pred, Dst, DstT: *(B->succ_begin()),
506 DstF: *(B->succ_begin() + 1),
507 IterationsCompletedInLoop: getCompletedIterationCount(B, Pred));
508 // Enqueue the new frontier onto the worklist.
509 enqueue(Set&: Dst);
510}
511
512void CoreEngine::HandleCleanupTemporaryBranch(const CXXBindTemporaryExpr *BTE,
513 const CFGBlock *B,
514 ExplodedNode *Pred) {
515 assert(B->succ_size() == 2);
516 ExplodedNodeSet Dst;
517 ExprEng.processCleanupTemporaryBranch(BTE, Pred, Dst, DstT: *(B->succ_begin()),
518 DstF: *(B->succ_begin() + 1));
519 // Enqueue the new frontier onto the worklist.
520 enqueue(Set&: Dst);
521}
522
523void CoreEngine::HandleStaticInit(const DeclStmt *DS, const CFGBlock *B,
524 ExplodedNode *Pred) {
525 assert(B->succ_size() == 2);
526 ExplodedNodeSet Dst;
527 ExprEng.processStaticInitializer(DS, Pred, Dst, DstT: *(B->succ_begin()),
528 DstF: *(B->succ_begin() + 1));
529 // Enqueue the new frontier onto the worklist.
530 enqueue(Set&: Dst);
531}
532
533void CoreEngine::HandlePostStmt(const CFGBlock *B, unsigned StmtIdx,
534 ExplodedNode *Pred) {
535 assert(B);
536 assert(!B->empty());
537
538 // We no-op by skipping any FullExprCleanup
539 while (StmtIdx < B->size() &&
540 (*B)[StmtIdx].getKind() == CFGElement::FullExprCleanup) {
541 StmtIdx++;
542 }
543
544 if (StmtIdx == B->size())
545 HandleBlockExit(B, Pred);
546 else {
547 ExprEng.setCurrLocationContextAndBlock(LC: Pred->getLocationContext(), B);
548 ExprEng.processCFGElement(E: (*B)[StmtIdx], Pred, StmtIdx);
549 }
550}
551
/// Decide whether the virtual base initializers of the constructor being
/// analyzed should run or be skipped. By CFG construction the branch has
/// two successors: the first skips virtual base initialization, the second
/// performs it (cf. the note emitted in HandleBlockEdge for the skip path).
void CoreEngine::HandleVirtualBaseBranch(const CFGBlock *B,
                                         ExplodedNode *Pred) {
  const LocationContext *LCtx = Pred->getLocationContext();
  // Were we called as a base-class constructor of some other object?
  if (const auto *CallerCtor = dyn_cast_or_null<CXXConstructExpr>(
          Val: LCtx->getStackFrame()->getCallSite())) {
    switch (CallerCtor->getConstructionKind()) {
    case CXXConstructionKind::NonVirtualBase:
    case CXXConstructionKind::VirtualBase: {
      // Base-subobject construction: the most derived class takes care of
      // the virtual bases, so take the successor that skips them.
      BlockEdge Loc(B, *B->succ_begin(), LCtx);
      HandleBlockEdge(L: Loc, Pred);
      return;
    }
    default:
      break;
    }
  }

  // We either don't see a parent stack frame because we're in the top frame,
  // or the parent stack frame doesn't initialize our virtual bases.
  BlockEdge Loc(B, *(B->succ_begin() + 1), LCtx);
  HandleBlockEdge(L: Loc, Pred);
}
574
575ExplodedNode *CoreEngine::makeNode(const ProgramPoint &Loc,
576 ProgramStateRef State, ExplodedNode *Pred,
577 bool MarkAsSink) const {
578 MarkAsSink = MarkAsSink || State->isPosteriorlyOverconstrained();
579
580 bool IsNew;
581 ExplodedNode *N = G.getNode(L: Loc, State, IsSink: MarkAsSink, IsNew: &IsNew);
582 N->addPredecessor(V: Pred, G);
583
584 return IsNew ? N : nullptr;
585}
586
/// Enqueue \p N so that processing continues inside \p Block. \p Idx is the
/// index of the CFG element just processed; depending on the kind of N's
/// program point we re-enqueue at the same index, advance to the next
/// element, or first materialize an intermediate PostStmt node.
void CoreEngine::enqueueStmtNode(ExplodedNode *N,
                                 const CFGBlock *Block, unsigned Idx) {
  assert(Block);
  assert(!N->isSink());

  // Check if this node entered a callee.
  if (N->getLocation().getAs<CallEnter>()) {
    // Still use the index of the CallExpr. It's needed to create the callee
    // StackFrameContext.
    WList->enqueue(N, B: Block, idx: Idx);
    return;
  }

  // Do not create extra nodes. Move to the next CFG element.
  if (N->getLocation().getAs<PostInitializer>() ||
      N->getLocation().getAs<PostImplicitCall>()||
      N->getLocation().getAs<LoopExit>()) {
    WList->enqueue(N, B: Block, idx: Idx+1);
    return;
  }

  // Epsilon nodes are re-enqueued at the same element index.
  if (N->getLocation().getAs<EpsilonPoint>()) {
    WList->enqueue(N, B: Block, idx: Idx);
    return;
  }

  // NewAllocator elements get no PostStmt node either; just advance.
  if ((*Block)[Idx].getKind() == CFGElement::NewAllocator) {
    WList->enqueue(N, B: Block, idx: Idx+1);
    return;
  }

  // At this point, we know we're processing a normal statement.
  CFGStmt CS = (*Block)[Idx].castAs<CFGStmt>();
  PostStmt Loc(CS.getStmt(), N->getLocationContext());

  // If the node already sits at the statement's PostStmt location (ignoring
  // program-point tags), no extra node is needed: advance directly.
  if (Loc == N->getLocation().withTag(tag: nullptr)) {
    // Note: 'N' should be a fresh node because otherwise it shouldn't be
    // a member of Deferred.
    WList->enqueue(N, B: Block, idx: Idx+1);
    return;
  }

  // Otherwise create a PostStmt node for the finished statement. A null
  // result means an identical node already existed (see makeNode), so
  // nothing more needs to be enqueued on this path.
  ExplodedNode *Succ = makeNode(Loc, State: N->getState(), Pred: N);

  if (Succ)
    WList->enqueue(N: Succ, B: Block, idx: Idx+1);
}
634
/// For a loop-terminator block \p B, return how many loop iterations have
/// already completed by the time the condition is evaluated on this visit;
/// return std::nullopt for terminators that are not (relevant) loops.
std::optional<unsigned>
CoreEngine::getCompletedIterationCount(const CFGBlock *B,
                                       ExplodedNode *Pred) const {
  const LocationContext *LC = Pred->getLocationContext();
  BlockCounter Counter = WList->getBlockCounter();
  // Times this block has been entered on the current stack frame, including
  // the current visit (HandleBlockEntrance increments before processing).
  unsigned BlockCount =
      Counter.getNumVisited(CallSite: LC->getStackFrame(), BlockID: B->getBlockID());

  // NOTE(review): Term would be null for a terminator-less block and isa<>
  // does not accept null; callers appear to pass only branch-terminator
  // blocks — confirm before reusing this helper elsewhere.
  const Stmt *Term = B->getTerminatorStmt();
  if (isa<ForStmt, WhileStmt, CXXForRangeStmt>(Val: Term)) {
    // Pre-tested loops evaluate the condition before each iteration, so the
    // current visit has not completed an iteration yet: subtract one.
    assert(BlockCount >= 1 &&
           "Block count of currently analyzed block must be >= 1");
    return BlockCount - 1;
  }
  if (isa<DoStmt>(Val: Term)) {
    // In a do-while loop one iteration happens before the first evaluation of
    // the loop condition, so we don't subtract one.
    return BlockCount;
  }
  // ObjCForCollectionStmt is skipped intentionally because the current
  // application of the iteration counts is not relevant for it.
  return std::nullopt;
}
658
659void CoreEngine::enqueue(ExplodedNodeSet &Set) {
660 for (const auto I : Set)
661 WList->enqueue(N: I);
662}
663
664void CoreEngine::enqueueStmtNodes(ExplodedNodeSet &Set, const CFGBlock *Block,
665 unsigned Idx) {
666 for (const auto I : Set)
667 enqueueStmtNode(N: I, Block, Idx);
668}
669
670void CoreEngine::enqueueEndOfFunction(ExplodedNodeSet &Set, const ReturnStmt *RS) {
671 for (ExplodedNode *Node : Set) {
672 const LocationContext *LocCtx = Node->getLocationContext();
673
674 // If we are in an inlined call, generate CallExitBegin node.
675 if (LocCtx->getParent()) {
676 // Use the callee location context.
677 CallExitBegin Loc(cast<StackFrameContext>(Val: LocCtx), RS);
678 if (ExplodedNode *Succ = makeNode(Loc, State: Node->getState(), Pred: Node))
679 WList->enqueue(N: Succ);
680 } else {
681 // TODO: We should run remove dead bindings here.
682 G.addEndOfPath(V: Node);
683 NumPathsExplored++;
684 }
685 }
686}
687
688ExplodedNode *NodeBuilder::generateNode(const ProgramPoint &Loc,
689 ProgramStateRef State,
690 ExplodedNode *FromN, bool MarkAsSink) {
691 HasGeneratedNodes = true;
692 Frontier.erase(N: FromN);
693 ExplodedNode *N = C.getEngine().makeNode(Loc, State, Pred: FromN, MarkAsSink);
694
695 Frontier.Add(N);
696
697 return N;
698}
699
700ExplodedNode *BranchNodeBuilder::generateNode(ProgramStateRef State,
701 bool Branch,
702 ExplodedNode *NodePred) {
703 const CFGBlock *Dst = Branch ? DstT : DstF;
704
705 if (!Dst)
706 return nullptr;
707
708 ProgramPoint Loc =
709 BlockEdge(C.getBlock(), Dst, NodePred->getLocationContext());
710 ExplodedNode *Succ = NodeBuilder::generateNode(Loc, State, FromN: NodePred);
711 return Succ;
712}
713
714ExplodedNode *IndirectGotoNodeBuilder::generateNode(const CFGBlock *Block,
715 ProgramStateRef St,
716 ExplodedNode *Pred) {
717 BlockEdge BE(C.getBlock(), Block, Pred->getLocationContext());
718 return generateNode(PP: BE, State: St, Pred);
719}
720
721ExplodedNode *SwitchNodeBuilder::generateCaseStmtNode(const CFGBlock *Block,
722 ProgramStateRef St,
723 ExplodedNode *Pred) {
724 BlockEdge BE(C.getBlock(), Block, Pred->getLocationContext());
725 return generateNode(Loc: BE, State: St, FromN: Pred);
726}
727
728ExplodedNode *SwitchNodeBuilder::generateDefaultCaseNode(ProgramStateRef St,
729 ExplodedNode *Pred) {
730 // Get the block for the default case.
731 const CFGBlock *Src = C.getBlock();
732 assert(Src->succ_rbegin() != Src->succ_rend());
733 CFGBlock *DefaultBlock = *Src->succ_rbegin();
734
735 // Basic correctness check for default blocks that are unreachable and not
736 // caught by earlier stages.
737 if (!DefaultBlock)
738 return nullptr;
739
740 BlockEdge BE(Src, DefaultBlock, Pred->getLocationContext());
741 return generateNode(Loc: BE, State: St, FromN: Pred);
742}
743