//===- TypeErasedDataflowAnalysis.cpp -------------------------------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This file defines type-erased base types and functions for building dataflow
// analyses that run over Control-Flow Graphs (CFGs).
//
//===----------------------------------------------------------------------===//

#include <cstdint>
#include <deque>
#include <optional>
#include <system_error>
#include <utility>
#include <vector>

#include "clang/AST/ASTDumper.h"
#include "clang/AST/DeclCXX.h"
#include "clang/AST/OperationKinds.h"
#include "clang/AST/StmtCXX.h"
#include "clang/AST/StmtVisitor.h"
#include "clang/Analysis/Analyses/PostOrderCFGView.h"
#include "clang/Analysis/CFG.h"
#include "clang/Analysis/FlowSensitive/DataflowEnvironment.h"
#include "clang/Analysis/FlowSensitive/DataflowLattice.h"
#include "clang/Analysis/FlowSensitive/DataflowWorklist.h"
#include "clang/Analysis/FlowSensitive/RecordOps.h"
#include "clang/Analysis/FlowSensitive/Transfer.h"
#include "clang/Analysis/FlowSensitive/TypeErasedDataflowAnalysis.h"
#include "clang/Analysis/FlowSensitive/Value.h"
#include "llvm/ADT/ArrayRef.h"
#include "llvm/ADT/STLExtras.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/Error.h"

#define DEBUG_TYPE "clang-dataflow"

namespace clang {
namespace dataflow {

/// Returns the index of `Block` in the successors of `Pred`.
static int blockIndexInPredecessor(const CFGBlock &Pred,
                                   const CFGBlock &Block) {
  auto BlockPos = llvm::find_if(
      Pred.succs(), [&Block](const CFGBlock::AdjacentBlock &Succ) {
        return Succ && Succ->getBlockID() == Block.getBlockID();
      });
  return BlockPos - Pred.succ_begin();
}

53// A "backedge" node is a block introduced in the CFG exclusively to indicate a
54// loop backedge. They are exactly identified by the presence of a non-null
55// pointer to the entry block of the loop condition. Note that this is not
56// necessarily the block with the loop statement as terminator, because
57// short-circuit operators will result in multiple blocks encoding the loop
58// condition, only one of which will contain the loop statement as terminator.
59static bool isBackedgeNode(const CFGBlock &B) {
60 return B.getLoopTarget() != nullptr;
61}
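
// For illustration (a sketch; exact block numbering depends on the CFG
// builder), given
//
//   while (Cond) { Body; }
//
// the CFG contains, in addition to the blocks for `Cond` and `Body`, a
// dedicated backedge block whose only role is to transfer control back to the
// start of the condition. For that block, `getLoopTarget()` is non-null; for
// every other block it is null, which is what `isBackedgeNode` relies on.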

namespace {

/// Extracts the terminator's condition expression.
class TerminatorVisitor
    : public ConstStmtVisitor<TerminatorVisitor, const Expr *> {
public:
  TerminatorVisitor() = default;
  const Expr *VisitIfStmt(const IfStmt *S) { return S->getCond(); }
  const Expr *VisitWhileStmt(const WhileStmt *S) { return S->getCond(); }
  const Expr *VisitDoStmt(const DoStmt *S) { return S->getCond(); }
  const Expr *VisitForStmt(const ForStmt *S) { return S->getCond(); }
  const Expr *VisitCXXForRangeStmt(const CXXForRangeStmt *) {
    // Don't do anything special for CXXForRangeStmt, because the condition
    // (being implicitly generated) isn't visible from the loop body.
    return nullptr;
  }
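  // Short-circuit operators split the condition across multiple blocks. As a
  // sketch, for `if (A && B)` the block that evaluates `A` has the
  // `BinaryOperator` itself as its terminator, and the branch out of that
  // block is decided by the LHS alone; hence we return `getLHS()` below.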
  const Expr *VisitBinaryOperator(const BinaryOperator *S) {
    assert(S->getOpcode() == BO_LAnd || S->getOpcode() == BO_LOr);
    return S->getLHS();
  }
  const Expr *VisitConditionalOperator(const ConditionalOperator *S) {
    return S->getCond();
  }
};

/// Holds data structures required for running dataflow analysis.
struct AnalysisContext {
  AnalysisContext(const AdornedCFG &ACFG, TypeErasedDataflowAnalysis &Analysis,
                  const Environment &InitEnv,
                  llvm::ArrayRef<std::optional<TypeErasedDataflowAnalysisState>>
                      BlockStates)
      : ACFG(ACFG), Analysis(Analysis), InitEnv(InitEnv),
        Log(*InitEnv.getDataflowAnalysisContext().getOptions().Log),
        BlockStates(BlockStates) {
    Log.beginAnalysis(ACFG, Analysis);
  }
  ~AnalysisContext() { Log.endAnalysis(); }

  /// Contains the CFG being analyzed.
  const AdornedCFG &ACFG;
  /// The analysis to be run.
  TypeErasedDataflowAnalysis &Analysis;
  /// Initial state to start the analysis.
  const Environment &InitEnv;
  Logger &Log;
  /// Stores the state of a CFG block if it has been evaluated by the analysis.
  /// The indices correspond to the block IDs.
  llvm::ArrayRef<std::optional<TypeErasedDataflowAnalysisState>> BlockStates;
};

class PrettyStackTraceAnalysis : public llvm::PrettyStackTraceEntry {
public:
  PrettyStackTraceAnalysis(const AdornedCFG &ACFG, const char *Message)
      : ACFG(ACFG), Message(Message) {}

  void print(raw_ostream &OS) const override {
    OS << Message << "\n";
    OS << "Decl:\n";
    ACFG.getDecl().dump(OS);
    OS << "CFG:\n";
    ACFG.getCFG().print(OS, LangOptions(), false);
  }

private:
  const AdornedCFG &ACFG;
  const char *Message;
};

class PrettyStackTraceCFGElement : public llvm::PrettyStackTraceEntry {
public:
  PrettyStackTraceCFGElement(const CFGElement &Element, int BlockIdx,
                             int ElementIdx, const char *Message)
      : Element(Element), BlockIdx(BlockIdx), ElementIdx(ElementIdx),
        Message(Message) {}

  void print(raw_ostream &OS) const override {
    OS << Message << ": Element [B" << BlockIdx << "." << ElementIdx << "]\n";
    if (auto Stmt = Element.getAs<CFGStmt>()) {
      OS << "Stmt:\n";
      ASTDumper Dumper(OS, false);
      Dumper.Visit(Stmt->getStmt());
    }
  }

private:
  const CFGElement &Element;
  int BlockIdx;
  int ElementIdx;
  const char *Message;
};

// Builds a joined TypeErasedDataflowAnalysisState from 0 or more sources,
// each of which may be owned (built as part of the join) or external (a
// reference to an Environment that will outlive the builder).
// Avoids unnecessary copies of the environment.
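//
// Typical usage, as a sketch (names are illustrative; see
// `computeBlockInputState` for the real use):
//
//   JoinedStateBuilder Builder(AC, JoinBehavior);
//   Builder.addUnowned(PredState);              // reference outliving Builder
//   Builder.addOwned(std::move(ModifiedCopy));  // freshly built, handed over
//   TypeErasedDataflowAnalysisState Joined = std::move(Builder).take();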
class JoinedStateBuilder {
  AnalysisContext &AC;
  Environment::ExprJoinBehavior JoinBehavior;
  std::vector<const TypeErasedDataflowAnalysisState *> All;
  std::deque<TypeErasedDataflowAnalysisState> Owned;

  TypeErasedDataflowAnalysisState
  join(const TypeErasedDataflowAnalysisState &L,
       const TypeErasedDataflowAnalysisState &R) {
    return {AC.Analysis.joinTypeErased(L.Lattice, R.Lattice),
            Environment::join(L.Env, R.Env, AC.Analysis, JoinBehavior)};
  }

public:
  JoinedStateBuilder(AnalysisContext &AC,
                     Environment::ExprJoinBehavior JoinBehavior)
      : AC(AC), JoinBehavior(JoinBehavior) {}

  void addOwned(TypeErasedDataflowAnalysisState State) {
    Owned.push_back(std::move(State));
    All.push_back(&Owned.back());
  }
  void addUnowned(const TypeErasedDataflowAnalysisState &State) {
    All.push_back(&State);
  }
  TypeErasedDataflowAnalysisState take() && {
    if (All.empty())
      // FIXME: Consider passing `Block` to Analysis.typeErasedInitialElement
      // to enable building analyses like computation of dominators that
      // initialize the state of each basic block differently.
      return {AC.Analysis.typeErasedInitialElement(), AC.InitEnv.fork()};
    if (All.size() == 1)
      // Join the environment with itself so that we discard expression state
      // if desired.
      // FIXME: We could consider writing special-case code for this that only
      // does the discarding, but it's not clear if this is worth it.
      return {All[0]->Lattice, Environment::join(All[0]->Env, All[0]->Env,
                                                 AC.Analysis, JoinBehavior)};

    auto Result = join(*All[0], *All[1]);
    for (unsigned I = 2; I < All.size(); ++I)
      Result = join(Result, *All[I]);
    return Result;
  }
};
} // namespace

static const Expr *getTerminatorCondition(const Stmt *TerminatorStmt) {
  return TerminatorStmt == nullptr ? nullptr
                                   : TerminatorVisitor().Visit(TerminatorStmt);
}

/// Computes the input state for a given basic block by joining the output
/// states of its predecessors.
///
/// Requirements:
///
/// All predecessors of `Block` except those with loop back edges must have
/// already been transferred. States in `AC.BlockStates` that are set to
/// `std::nullopt` represent basic blocks that are not evaluated yet.
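///
/// As a sketch of what the join does: for
///
///   if (C) { A; } else { B; }
///   D;
///
/// the input state of the block containing `D` is the join of the output
/// states of the blocks containing `A` and `B`, where each predecessor's
/// environment has first been extended (via `transferBranch` and the built-in
/// flow condition) with the knowledge that `C` was true or false on that path.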
static TypeErasedDataflowAnalysisState
computeBlockInputState(const CFGBlock &Block, AnalysisContext &AC) {
  std::vector<const CFGBlock *> Preds(Block.pred_begin(), Block.pred_end());
  if (Block.getTerminator().isTemporaryDtorsBranch()) {
    // This handles a special case where the code from which the CFG was
    // produced includes a conditional operator with a branch that constructs a
    // temporary and calls a destructor annotated as noreturn. The CFG models
    // this as follows:
    //
    // B1 (contains the condition of the conditional operator) - succs: B2, B3
    // B2 (contains code that does not call a noreturn destructor) - succs: B4
    // B3 (contains code that calls a noreturn destructor) - succs: B4
    // B4 (has temporary destructor terminator) - succs: B5, B6
    // B5 (noreturn block that is associated with the noreturn destructor call)
    // B6 (contains code that follows the conditional operator statement)
    //
    // The first successor (B5 above) of a basic block with a temporary
    // destructor terminator (B4 above) is the block that evaluates the
    // destructor. If that block has a noreturn element then the predecessor
    // block that constructed the temporary object (B3 above) is effectively a
    // noreturn block and its state should not be used as input for the state
    // of the block that has a temporary destructor terminator (B4 above). This
    // holds regardless of which branch of the ternary operator calls the
    // noreturn destructor. However, it doesn't handle cases where a nested
    // ternary operator includes a branch that contains a noreturn destructor
    // call.
    //
    // See `NoreturnDestructorTest` for concrete examples.
    if (Block.succ_begin()->getReachableBlock() != nullptr &&
        Block.succ_begin()->getReachableBlock()->hasNoReturnElement()) {
      auto &StmtToBlock = AC.ACFG.getStmtToBlock();
      auto StmtBlock = StmtToBlock.find(Block.getTerminatorStmt());
      assert(StmtBlock != StmtToBlock.end());
      llvm::erase(Preds, StmtBlock->getSecond());
    }
  }

  // If any of the predecessor blocks contains an expression consumed in a
  // different block, we need to keep expression state.
  // Note that in this case, we keep expression state for all predecessors,
  // rather than only those predecessors that actually contain an expression
  // consumed in a different block. While this is potentially suboptimal, it's
  // actually likely, if we have control flow within a full expression, that
  // all predecessors have expression state consumed in a different block.
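  //
  // For example (a sketch): in `int X = C ? F() : G();`, the sub-expressions
  // `F()` and `G()` are evaluated in different blocks than the one that
  // consumes the value of the conditional operator, so their expression state
  // must be kept across the join.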
  Environment::ExprJoinBehavior JoinBehavior = Environment::DiscardExprState;
  for (const CFGBlock *Pred : Preds) {
    if (Pred && AC.ACFG.containsExprConsumedInDifferentBlock(*Pred)) {
      JoinBehavior = Environment::KeepExprState;
      break;
    }
  }

  JoinedStateBuilder Builder(AC, JoinBehavior);
  for (const CFGBlock *Pred : Preds) {
    // Skip `Pred` if it is unreachable or control flow cannot get past it.
    if (!Pred || Pred->hasNoReturnElement())
      continue;

    // Skip if `Pred` was not evaluated yet. This could happen if `Pred` has a
    // loop back edge to `Block`.
    const std::optional<TypeErasedDataflowAnalysisState> &MaybePredState =
        AC.BlockStates[Pred->getBlockID()];
    if (!MaybePredState)
      continue;

    const TypeErasedDataflowAnalysisState &PredState = *MaybePredState;
    const Expr *Cond = getTerminatorCondition(Pred->getTerminatorStmt());
    if (Cond == nullptr) {
      Builder.addUnowned(PredState);
      continue;
    }

    bool BranchVal = blockIndexInPredecessor(*Pred, Block) == 0;
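    // Note: by CFG convention, the first successor of a block with a
    // conditional terminator is the branch taken when the condition is true,
    // so an index of 0 above means `Block` is reached on the "true" branch.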

    // `transferBranch` may need to mutate the environment to describe the
    // dynamic effect of the terminator for a given branch. Copy now.
    TypeErasedDataflowAnalysisState Copy = MaybePredState->fork();
    if (AC.Analysis.builtinOptions()) {
      auto *CondVal = Copy.Env.get<BoolValue>(*Cond);
      // In transferCFGBlock(), we ensure that we always have a `Value`
      // for the terminator condition, so assert this. We consciously
      // assert ourselves instead of asserting via `cast()` so that we get
      // a more meaningful line number if the assertion fails.
      assert(CondVal != nullptr);
      BoolValue *AssertedVal =
          BranchVal ? CondVal : &Copy.Env.makeNot(*CondVal);
      Copy.Env.assume(AssertedVal->formula());
    }
    AC.Analysis.transferBranchTypeErased(BranchVal, Cond, Copy.Lattice,
                                         Copy.Env);
    Builder.addOwned(std::move(Copy));
  }
  return std::move(Builder).take();
}

/// Built-in transfer function for `CFGStmt`.
static void
builtinTransferStatement(unsigned CurBlockID, const CFGStmt &Elt,
                         TypeErasedDataflowAnalysisState &InputState,
                         AnalysisContext &AC) {
  const Stmt *S = Elt.getStmt();
  assert(S != nullptr);
  transfer(StmtToEnvMap(AC.ACFG, AC.BlockStates, CurBlockID, InputState), *S,
           InputState.Env, AC.Analysis);
}

/// Built-in transfer function for `CFGInitializer`.
static void
builtinTransferInitializer(const CFGInitializer &Elt,
                           TypeErasedDataflowAnalysisState &InputState) {
  const CXXCtorInitializer *Init = Elt.getInitializer();
  assert(Init != nullptr);

  auto &Env = InputState.Env;
  auto &ThisLoc = *Env.getThisPointeeStorageLocation();

  if (!Init->isAnyMemberInitializer())
    // FIXME: Handle base initialization
    return;

  auto *InitExpr = Init->getInit();
  assert(InitExpr != nullptr);

  const FieldDecl *Member = nullptr;
  RecordStorageLocation *ParentLoc = &ThisLoc;
  StorageLocation *MemberLoc = nullptr;
  if (Init->isMemberInitializer()) {
    Member = Init->getMember();
    MemberLoc = ThisLoc.getChild(*Member);
  } else {
    IndirectFieldDecl *IndirectField = Init->getIndirectMember();
    assert(IndirectField != nullptr);
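    // An indirect member initializer initializes a member of an anonymous
    // struct or union. As a sketch (field names are illustrative):
    //
    //   struct S {
    //     struct { int X; };  // anonymous struct
    //     S() : X(0) {}       // initializer for the `IndirectFieldDecl` `X`
    //   };
    //
    // The `chain()` walked below goes from the outermost anonymous field down
    // to `X`; we follow it to find the storage location of the innermost
    // member.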
    MemberLoc = &ThisLoc;
    for (const auto *I : IndirectField->chain()) {
      Member = cast<FieldDecl>(I);
      ParentLoc = cast<RecordStorageLocation>(MemberLoc);
      MemberLoc = ParentLoc->getChild(*Member);
    }
  }
  assert(Member != nullptr);

  // FIXME: Instead of these case distinctions, we would ideally want to be
  // able to simply use `Environment::createObject()` here, the same way that
  // we do this in `TransferVisitor::VisitInitListExpr()`. However, this would
  // require us to be able to build a list of fields that we then use to
  // initialize a `RecordStorageLocation` -- and the problem is that, when we
  // get here, the `RecordStorageLocation` already exists. We should explore
  // if there's anything that we can do to change this.
  if (Member->getType()->isReferenceType()) {
    auto *InitExprLoc = Env.getStorageLocation(*InitExpr);
    if (InitExprLoc == nullptr)
      return;

    ParentLoc->setChild(*Member, InitExprLoc);
    // Record-type initializers construct themselves directly into the result
    // object, so there is no need to handle them here.
  } else if (!Member->getType()->isRecordType()) {
    assert(MemberLoc != nullptr);
    if (auto *InitExprVal = Env.getValue(*InitExpr))
      Env.setValue(*MemberLoc, *InitExprVal);
  }
}

static void builtinTransfer(unsigned CurBlockID, const CFGElement &Elt,
                            TypeErasedDataflowAnalysisState &State,
                            AnalysisContext &AC) {
  switch (Elt.getKind()) {
  case CFGElement::Statement:
    builtinTransferStatement(CurBlockID, Elt.castAs<CFGStmt>(), State, AC);
    break;
  case CFGElement::Initializer:
    builtinTransferInitializer(Elt.castAs<CFGInitializer>(), State);
    break;
  case CFGElement::LifetimeEnds:
    // Removing declarations when their lifetime ends serves two purposes:
    // - Eliminate unnecessary clutter from `Environment::DeclToLoc`
    // - Allow us to assert that, when joining two `Environment`s, the two
    //   `DeclToLoc` maps never contain entries that map the same declaration
    //   to different storage locations.
    if (const ValueDecl *VD = Elt.castAs<CFGLifetimeEnds>().getVarDecl())
      State.Env.removeDecl(*VD);
    break;
  default:
    // FIXME: Evaluate other kinds of `CFGElement`
    break;
  }
}

/// Transfers `State` by evaluating each element in the `Block` based on the
/// `AC.Analysis` specified.
///
/// Built-in transfer functions (if the option for `ApplyBuiltinTransfer` is set
/// by the analysis) will be applied to the element before evaluation by the
/// user-specified analysis.
/// `PostAnalysisCallbacks` (if provided) will be applied to the element before
/// and/or after evaluation by the user-specified analysis.
static TypeErasedDataflowAnalysisState
transferCFGBlock(const CFGBlock &Block, AnalysisContext &AC,
                 const CFGEltCallbacksTypeErased &PostAnalysisCallbacks = {}) {
  AC.Log.enterBlock(Block, PostAnalysisCallbacks.Before != nullptr ||
                               PostAnalysisCallbacks.After != nullptr);
  auto State = computeBlockInputState(Block, AC);
  AC.Log.recordState(State);
  int ElementIdx = 1;
  for (const auto &Element : Block) {
    PrettyStackTraceCFGElement CrashInfo(Element, Block.getBlockID(),
                                         ElementIdx++, "transferCFGBlock");

    AC.Log.enterElement(Element);

    if (PostAnalysisCallbacks.Before) {
      PostAnalysisCallbacks.Before(Element, State);
    }

    // Built-in analysis
    if (AC.Analysis.builtinOptions()) {
      builtinTransfer(Block.getBlockID(), Element, State, AC);
    }

    // User-provided analysis
    AC.Analysis.transferTypeErased(Element, State.Lattice, State.Env);

    if (PostAnalysisCallbacks.After) {
      PostAnalysisCallbacks.After(Element, State);
    }

    AC.Log.recordState(State);
  }

  // If we have a terminator, evaluate its condition.
  // This `Expr` may not appear as a `CFGElement` anywhere else, and it's
  // important that we evaluate it here (rather than while processing the
  // terminator) so that we put the corresponding value in the right
  // environment.
  if (const Expr *TerminatorCond =
          dyn_cast_or_null<Expr>(Block.getTerminatorCondition())) {
    if (State.Env.getValue(*TerminatorCond) == nullptr)
      // FIXME: This only runs the builtin transfer, not the analysis-specific
      // transfer. Fixing this isn't trivial, as the analysis-specific transfer
      // takes a `CFGElement` as input, but some expressions only show up as a
      // terminator condition, not as a `CFGElement`. The condition of an if
      // statement is one such example.
      transfer(StmtToEnvMap(AC.ACFG, AC.BlockStates, Block.getBlockID(), State),
               *TerminatorCond, State.Env, AC.Analysis);

    // If the transfer function didn't produce a value, create an atom so that
    // we have *some* value for the condition expression. This ensures that
    // when we extend the flow condition, it actually changes.
    if (State.Env.getValue(*TerminatorCond) == nullptr)
      State.Env.setValue(*TerminatorCond, State.Env.makeAtomicBoolValue());
    AC.Log.recordState(State);
  }

  return State;
}

llvm::Expected<std::vector<std::optional<TypeErasedDataflowAnalysisState>>>
runTypeErasedDataflowAnalysis(
    const AdornedCFG &ACFG, TypeErasedDataflowAnalysis &Analysis,
    const Environment &InitEnv,
    const CFGEltCallbacksTypeErased &PostAnalysisCallbacks,
    std::int32_t MaxBlockVisits) {
  PrettyStackTraceAnalysis CrashInfo(ACFG, "runTypeErasedDataflowAnalysis");

  std::optional<Environment> MaybeStartingEnv;
  if (InitEnv.callStackSize() == 0) {
    MaybeStartingEnv = InitEnv.fork();
    MaybeStartingEnv->initialize();
  }
  const Environment &StartingEnv =
      MaybeStartingEnv ? *MaybeStartingEnv : InitEnv;

  const clang::CFG &CFG = ACFG.getCFG();
  PostOrderCFGView POV(&CFG);
  ForwardDataflowWorklist Worklist(CFG, &POV);
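  // The worklist uses the post-order view above to prioritize dequeued blocks;
  // for this forward analysis that generally means predecessors are processed
  // before their successors, which reduces the number of re-visits needed to
  // reach the fixpoint.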

  std::vector<std::optional<TypeErasedDataflowAnalysisState>> BlockStates(
      CFG.size());

  // The entry basic block doesn't contain statements so it can be skipped.
  const CFGBlock &Entry = CFG.getEntry();
  BlockStates[Entry.getBlockID()] = {Analysis.typeErasedInitialElement(),
                                     StartingEnv.fork()};
  Worklist.enqueueSuccessors(&Entry);

  AnalysisContext AC(ACFG, Analysis, StartingEnv, BlockStates);
  std::int32_t BlockVisits = 0;
  while (const CFGBlock *Block = Worklist.dequeue()) {
    LLVM_DEBUG(llvm::dbgs()
               << "Processing Block " << Block->getBlockID() << "\n");
    if (++BlockVisits > MaxBlockVisits) {
      return llvm::createStringError(std::errc::timed_out,
                                     "maximum number of blocks processed");
    }

    const std::optional<TypeErasedDataflowAnalysisState> &OldBlockState =
        BlockStates[Block->getBlockID()];
    TypeErasedDataflowAnalysisState NewBlockState =
        transferCFGBlock(*Block, AC);
    LLVM_DEBUG({
      llvm::errs() << "New Env:\n";
      NewBlockState.Env.dump();
    });

    if (OldBlockState) {
      LLVM_DEBUG({
        llvm::errs() << "Old Env:\n";
        OldBlockState->Env.dump();
      });
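      // At loop heads (backedge nodes) we widen the new state with respect to
      // the previous one instead of only checking for equality; with a
      // suitable widening operator this accelerates, and for lattices with
      // infinite ascending chains enables, convergence of the fixpoint loop.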
      if (isBackedgeNode(*Block)) {
        LatticeJoinEffect Effect1 = Analysis.widenTypeErased(
            NewBlockState.Lattice, OldBlockState->Lattice);
        LatticeJoinEffect Effect2 =
            NewBlockState.Env.widen(OldBlockState->Env, Analysis);
        if (Effect1 == LatticeJoinEffect::Unchanged &&
            Effect2 == LatticeJoinEffect::Unchanged) {
          // The state of `Block` didn't change from widening so there's no
          // need to revisit its successors.
          AC.Log.blockConverged();
          continue;
        }
      } else if (Analysis.isEqualTypeErased(OldBlockState->Lattice,
                                            NewBlockState.Lattice) &&
                 OldBlockState->Env.equivalentTo(NewBlockState.Env, Analysis)) {
        // The state of `Block` didn't change after transfer so there's no need
        // to revisit its successors.
        AC.Log.blockConverged();
        continue;
      }
    }

    BlockStates[Block->getBlockID()] = std::move(NewBlockState);

    // Do not add unreachable successor blocks to `Worklist`.
    if (Block->hasNoReturnElement())
      continue;

    Worklist.enqueueSuccessors(Block);
  }
  // FIXME: Consider evaluating unreachable basic blocks (those that have a
  // state set to `std::nullopt` at this point) to also analyze dead code.

  if (PostAnalysisCallbacks.Before || PostAnalysisCallbacks.After) {
    for (const CFGBlock *Block : ACFG.getCFG()) {
      // Skip blocks that were not evaluated.
      if (!BlockStates[Block->getBlockID()])
        continue;
      transferCFGBlock(*Block, AC, PostAnalysisCallbacks);
    }
  }

  return std::move(BlockStates);
}

} // namespace dataflow
} // namespace clang