//===- UninitializedValues.cpp - Find Uninitialized Values ----------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This file implements uninitialized values analysis for source-level CFGs.
//
//===----------------------------------------------------------------------===//

#include "clang/Analysis/Analyses/UninitializedValues.h"
#include "clang/AST/Attr.h"
#include "clang/AST/Decl.h"
#include "clang/AST/DeclBase.h"
#include "clang/AST/Expr.h"
#include "clang/AST/OperationKinds.h"
#include "clang/AST/Stmt.h"
#include "clang/AST/StmtObjC.h"
#include "clang/AST/StmtVisitor.h"
#include "clang/AST/Type.h"
#include "clang/Analysis/Analyses/PostOrderCFGView.h"
#include "clang/Analysis/AnalysisDeclContext.h"
#include "clang/Analysis/CFG.h"
#include "clang/Analysis/DomainSpecific/ObjCNoReturn.h"
#include "clang/Analysis/FlowSensitive/DataflowWorklist.h"
#include "clang/Basic/LLVM.h"
#include "llvm/ADT/BitVector.h"
#include "llvm/ADT/DenseMap.h"
#include "llvm/ADT/PackedVector.h"
#include "llvm/ADT/SmallBitVector.h"
#include "llvm/ADT/SmallVector.h"
#include "llvm/Support/Casting.h"
#include <algorithm>
#include <cassert>
#include <optional>

using namespace clang;

#define DEBUG_LOGGING 0

static bool recordIsNotEmpty(const RecordDecl *RD) {
  // We consider a record decl to be empty if it contains only unnamed bit-
  // fields, zero-width fields, and fields of empty record type.
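  // For example, 'struct S { int : 4; };' is considered empty (its only
  // field is an unnamed bit-field), while 'struct T { int x; };' is not.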
  for (const auto *FD : RD->fields()) {
    if (FD->isUnnamedBitField())
      continue;
    if (FD->isZeroSize(FD->getASTContext()))
      continue;
    // The only case remaining to check is for a field declaration of record
    // type and whether that record itself is empty.
    if (const auto *FieldRD = FD->getType()->getAsRecordDecl();
        !FieldRD || recordIsNotEmpty(FieldRD))
      return true;
  }
  return false;
}

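// Decide whether the analysis should track 'vd'. For example, a local
// 'int x;' declared directly in the analyzed function is tracked, while a
// 'static' local (global storage), a 'catch' exception variable, an
// init-capture, or an implicitly generated variable is not. Only scalars,
// vectors, RVV sizeless builtins, and non-empty records qualify.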
static bool isTrackedVar(const VarDecl *vd, const DeclContext *dc) {
  if (vd->isLocalVarDecl() && !vd->hasGlobalStorage() &&
      !vd->isExceptionVariable() && !vd->isInitCapture() && !vd->isImplicit() &&
      vd->getDeclContext() == dc) {
    QualType ty = vd->getType();
    if (const auto *RD = ty->getAsRecordDecl())
      return recordIsNotEmpty(RD);
    return ty->isScalarType() || ty->isVectorType() ||
           ty->isRVVSizelessBuiltinType();
  }
  return false;
}

//------------------------------------------------------------------------====//
// DeclToIndex: a mapping from Decls we track to value indices.
//====------------------------------------------------------------------------//

namespace {

class DeclToIndex {
  llvm::DenseMap<const VarDecl *, unsigned> map;

public:
  DeclToIndex() = default;

  /// Compute the actual mapping from declarations to bits.
  void computeMap(const DeclContext &dc);

  /// Return the number of declarations in the map.
  unsigned size() const { return map.size(); }

  /// Returns the bit vector index for a given declaration.
  std::optional<unsigned> getValueIndex(const VarDecl *d) const;
};

} // namespace

void DeclToIndex::computeMap(const DeclContext &dc) {
  unsigned count = 0;
  DeclContext::specific_decl_iterator<VarDecl> I(dc.decls_begin()),
                                               E(dc.decls_end());
  for (; I != E; ++I) {
    const VarDecl *vd = *I;
    if (isTrackedVar(vd, &dc))
      map[vd] = count++;
  }
}

std::optional<unsigned> DeclToIndex::getValueIndex(const VarDecl *d) const {
  llvm::DenseMap<const VarDecl *, unsigned>::const_iterator I = map.find(d);
  if (I == map.end())
    return std::nullopt;
  return I->second;
}

//------------------------------------------------------------------------====//
// CFGBlockValues: dataflow values for CFG blocks.
//====------------------------------------------------------------------------//

// These values are defined in such a way that a merge can be done using
// a bitwise OR.
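// For example, merging a path on which a variable is Initialized (01) with
// one on which it is Uninitialized (10) yields MayUninitialized (11), and
// Unknown (00) is the identity for the merge.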
enum Value { Unknown = 0x0,         /* 00 */
             Initialized = 0x1,     /* 01 */
             Uninitialized = 0x2,   /* 10 */
             MayUninitialized = 0x3 /* 11 */ };

static bool isUninitialized(const Value v) {
  return v >= Uninitialized;
}

static bool isAlwaysUninit(const Value v) {
  return v == Uninitialized;
}

namespace {

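// Each tracked variable occupies two bits (the Value encoding above),
// packed into a single bit vector.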
using ValueVector = llvm::PackedVector<Value, 2, llvm::SmallBitVector>;

class CFGBlockValues {
  const CFG &cfg;
  SmallVector<ValueVector, 8> vals;
  ValueVector scratch;
  DeclToIndex declToIndex;

public:
  CFGBlockValues(const CFG &cfg);

  unsigned getNumEntries() const { return declToIndex.size(); }

  void computeSetOfDeclarations(const DeclContext &dc);

  ValueVector &getValueVector(const CFGBlock *block) {
    return vals[block->getBlockID()];
  }

  void setAllScratchValues(Value V);
  void mergeIntoScratch(ValueVector const &source, bool isFirst);
  bool updateValueVectorWithScratch(const CFGBlock *block);

  bool hasNoDeclarations() const {
    return declToIndex.size() == 0;
  }

  void resetScratch();

  ValueVector::reference operator[](const VarDecl *vd);

  Value getValue(const CFGBlock *block, const CFGBlock *dstBlock,
                 const VarDecl *vd) {
    std::optional<unsigned> idx = declToIndex.getValueIndex(vd);
    return getValueVector(block)[*idx];
  }
};

} // namespace

CFGBlockValues::CFGBlockValues(const CFG &c) : cfg(c), vals(0) {}

void CFGBlockValues::computeSetOfDeclarations(const DeclContext &dc) {
  declToIndex.computeMap(dc);
  unsigned decls = declToIndex.size();
  scratch.resize(decls);
  unsigned n = cfg.getNumBlockIDs();
  if (!n)
    return;
  vals.resize(n);
  for (auto &val : vals)
    val.resize(decls);
}

#if DEBUG_LOGGING
static void printVector(const CFGBlock *block, ValueVector &bv,
                        unsigned num) {
  llvm::errs() << block->getBlockID() << " :";
  for (const auto &i : bv)
    llvm::errs() << ' ' << i;
  llvm::errs() << " : " << num << '\n';
}
#endif

void CFGBlockValues::setAllScratchValues(Value V) {
  for (unsigned I = 0, E = scratch.size(); I != E; ++I)
    scratch[I] = V;
}

void CFGBlockValues::mergeIntoScratch(ValueVector const &source,
                                      bool isFirst) {
  if (isFirst)
    scratch = source;
  else
    scratch |= source;
}

bool CFGBlockValues::updateValueVectorWithScratch(const CFGBlock *block) {
  ValueVector &dst = getValueVector(block);
  bool changed = (dst != scratch);
  if (changed)
    dst = scratch;
#if DEBUG_LOGGING
  printVector(block, scratch, 0);
#endif
  return changed;
}

void CFGBlockValues::resetScratch() {
  scratch.reset();
}

ValueVector::reference CFGBlockValues::operator[](const VarDecl *vd) {
  return scratch[*declToIndex.getValueIndex(vd)];
}

//------------------------------------------------------------------------====//
// Classification of DeclRefExprs as use or initialization.
//====------------------------------------------------------------------------//

namespace {

class FindVarResult {
  const VarDecl *vd;
  const DeclRefExpr *dr;

public:
  FindVarResult(const VarDecl *vd, const DeclRefExpr *dr) : vd(vd), dr(dr) {}

  const DeclRefExpr *getDeclRefExpr() const { return dr; }
  const VarDecl *getDecl() const { return vd; }
};

} // namespace

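// Strip away parentheses, no-op casts, and lvalue bitcasts. For instance,
// an lvalue bitcast such as '(float &)i' (for 'int i') is stripped down to
// the DeclRefExpr for 'i'.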
static const Expr *stripCasts(ASTContext &C, const Expr *Ex) {
  while (Ex) {
    Ex = Ex->IgnoreParenNoopCasts(C);
    if (const auto *CE = dyn_cast<CastExpr>(Ex)) {
      if (CE->getCastKind() == CK_LValueBitCast) {
        Ex = CE->getSubExpr();
        continue;
      }
    }
    break;
  }
  return Ex;
}

/// If E is an expression comprising a reference to a single variable, find that
/// variable.
static FindVarResult findVar(const Expr *E, const DeclContext *DC) {
  if (const auto *DRE =
          dyn_cast<DeclRefExpr>(stripCasts(DC->getParentASTContext(), E)))
    if (const auto *VD = dyn_cast<VarDecl>(DRE->getDecl()))
      if (isTrackedVar(VD, DC))
        return FindVarResult(VD, DRE);
  return FindVarResult(nullptr, nullptr);
}

namespace {

/// Classify each DeclRefExpr as an initialization or a use. Any
/// DeclRefExpr which isn't explicitly classified will be assumed to have
/// escaped the analysis and will be treated as an initialization.
class ClassifyRefs : public StmtVisitor<ClassifyRefs> {
public:
  enum Class {
    Init,
    Use,
    SelfInit,
    ConstRefUse,
    Ignore
  };
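  // Note: classify() merges multiple classifications of the same DeclRefExpr
  // with std::max, so the enumerator order above matters: the highest-valued
  // classification wins (e.g. Ignore takes precedence over Use).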

private:
  const DeclContext *DC;
  llvm::DenseMap<const DeclRefExpr *, Class> Classification;

  bool isTrackedVar(const VarDecl *VD) const {
    return ::isTrackedVar(VD, DC);
  }

  void classify(const Expr *E, Class C);

public:
  ClassifyRefs(AnalysisDeclContext &AC) : DC(cast<DeclContext>(AC.getDecl())) {}

  void VisitDeclStmt(DeclStmt *DS);
  void VisitUnaryOperator(UnaryOperator *UO);
  void VisitBinaryOperator(BinaryOperator *BO);
  void VisitCallExpr(CallExpr *CE);
  void VisitCastExpr(CastExpr *CE);
  void VisitOMPExecutableDirective(OMPExecutableDirective *ED);

  void operator()(Stmt *S) { Visit(S); }

  Class get(const DeclRefExpr *DRE) const {
    llvm::DenseMap<const DeclRefExpr*, Class>::const_iterator I
        = Classification.find(DRE);
    if (I != Classification.end())
      return I->second;

    const auto *VD = dyn_cast<VarDecl>(DRE->getDecl());
    if (!VD || !isTrackedVar(VD))
      return Ignore;

    return Init;
  }
};

} // namespace

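// Detect the self-initialization idiom, e.g. 'int x = x;'. Variables of
// record type are excluded and yield nullptr.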
static const DeclRefExpr *getSelfInitExpr(VarDecl *VD) {
  if (VD->getType()->isRecordType())
    return nullptr;
  if (Expr *Init = VD->getInit()) {
    const auto *DRE =
        dyn_cast<DeclRefExpr>(stripCasts(VD->getASTContext(), Init));
    if (DRE && DRE->getDecl() == VD)
      return DRE;
  }
  return nullptr;
}

void ClassifyRefs::classify(const Expr *E, Class C) {
  // The result of a ?: could also be an lvalue.
  E = E->IgnoreParens();
  if (const auto *CO = dyn_cast<ConditionalOperator>(E)) {
    classify(CO->getTrueExpr(), C);
    classify(CO->getFalseExpr(), C);
    return;
  }

  if (const auto *BCO = dyn_cast<BinaryConditionalOperator>(E)) {
    classify(BCO->getFalseExpr(), C);
    return;
  }

  if (const auto *OVE = dyn_cast<OpaqueValueExpr>(E)) {
    classify(OVE->getSourceExpr(), C);
    return;
  }

  if (const auto *ME = dyn_cast<MemberExpr>(E)) {
    if (const auto *VD = dyn_cast<VarDecl>(ME->getMemberDecl())) {
      if (!VD->isStaticDataMember())
        classify(ME->getBase(), C);
    }
    return;
  }

  if (const auto *BO = dyn_cast<BinaryOperator>(E)) {
    switch (BO->getOpcode()) {
    case BO_PtrMemD:
    case BO_PtrMemI:
      classify(BO->getLHS(), C);
      return;
    case BO_Comma:
      classify(BO->getRHS(), C);
      return;
    default:
      return;
    }
  }

  FindVarResult Var = findVar(E, DC);
  if (const DeclRefExpr *DRE = Var.getDeclRefExpr())
    Classification[DRE] = std::max(Classification[DRE], C);
}

void ClassifyRefs::VisitDeclStmt(DeclStmt *DS) {
  for (auto *DI : DS->decls()) {
    auto *VD = dyn_cast<VarDecl>(DI);
    if (VD && isTrackedVar(VD))
      if (const DeclRefExpr *DRE = getSelfInitExpr(VD))
        Classification[DRE] = SelfInit;
  }
}

void ClassifyRefs::VisitBinaryOperator(BinaryOperator *BO) {
  // Ignore the evaluation of a DeclRefExpr on the LHS of an assignment. If this
  // is not a compound-assignment, we will treat it as initializing the variable
  // when TransferFunctions visits it. A compound-assignment does not initialize
  // the variable, but it does read the variable's prior value, so we classify
  // its LHS as a use.
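  // e.g. the 'x' in 'x = 1' is ignored here (TransferFunctions records the
  // initialization), while the 'x' in 'x += 1' is classified as a use.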
  if (BO->isCompoundAssignmentOp())
    classify(BO->getLHS(), Use);
  else if (BO->getOpcode() == BO_Assign || BO->getOpcode() == BO_Comma)
    classify(BO->getLHS(), Ignore);
}

void ClassifyRefs::VisitUnaryOperator(UnaryOperator *UO) {
  // Increment and decrement are uses despite there being no lvalue-to-rvalue
  // conversion.
  if (UO->isIncrementDecrementOp())
    classify(UO->getSubExpr(), Use);
}

void ClassifyRefs::VisitOMPExecutableDirective(OMPExecutableDirective *ED) {
  for (Stmt *S : OMPExecutableDirective::used_clauses_children(ED->clauses()))
    classify(cast<Expr>(S), Use);
}

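// Returns true for pointers to const data, e.g. 'const int *'. Note that
// 'int *const' does not qualify, since the pointee itself is non-const.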
static bool isPointerToConst(const QualType &QT) {
  return QT->isAnyPointerType() && QT->getPointeeType().isConstQualified();
}

static bool hasTrivialBody(CallExpr *CE) {
  if (FunctionDecl *FD = CE->getDirectCallee()) {
    if (FunctionTemplateDecl *FTD = FD->getPrimaryTemplate())
      return FTD->getTemplatedDecl()->hasTrivialBody();
    return FD->hasTrivialBody();
  }
  return false;
}

void ClassifyRefs::VisitCallExpr(CallExpr *CE) {
  // Classify arguments to std::move as used.
  if (CE->isCallToStdMove()) {
    // RecordTypes are handled in SemaDeclCXX.cpp.
    if (!CE->getArg(0)->getType()->isRecordType())
      classify(CE->getArg(0), Use);
    return;
  }
  bool isTrivialBody = hasTrivialBody(CE);
  // If a value is passed by const pointer to a function,
  // we should not assume that it is initialized by the call, and we
  // conservatively do not assume that it is used.
  // If a value is passed by const reference to a function,
  // it should already be initialized.
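  // For example, given 'void f(const int *p);' the call 'f(&x)' neither
  // initializes nor uses 'x', while given 'void g(const int &r);' the call
  // 'g(x)' expects 'x' to already be initialized (a ConstRefUse).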
  for (CallExpr::arg_iterator I = CE->arg_begin(), E = CE->arg_end();
       I != E; ++I) {
    if ((*I)->isGLValue()) {
      if ((*I)->getType().isConstQualified())
        classify((*I), isTrivialBody ? Ignore : ConstRefUse);
    } else if (isPointerToConst((*I)->getType())) {
      const Expr *Ex = stripCasts(DC->getParentASTContext(), *I);
      const auto *UO = dyn_cast<UnaryOperator>(Ex);
      if (UO && UO->getOpcode() == UO_AddrOf)
        Ex = UO->getSubExpr();
      classify(Ex, Ignore);
    }
  }
}

void ClassifyRefs::VisitCastExpr(CastExpr *CE) {
  if (CE->getCastKind() == CK_LValueToRValue)
    classify(CE->getSubExpr(), Use);
  else if (const auto *CSE = dyn_cast<CStyleCastExpr>(CE)) {
    if (CSE->getType()->isVoidType()) {
      // Squelch any detected load of an uninitialized value if
      // we cast it to void.
      // e.g. (void) x;
      classify(CSE->getSubExpr(), Ignore);
    }
  }
}

//------------------------------------------------------------------------====//
// Transfer function for uninitialized values analysis.
//====------------------------------------------------------------------------//

namespace {

class TransferFunctions : public StmtVisitor<TransferFunctions> {
  CFGBlockValues &vals;
  const CFG &cfg;
  const CFGBlock *block;
  AnalysisDeclContext &ac;
  const ClassifyRefs &classification;
  ObjCNoReturn objCNoRet;
  UninitVariablesHandler &handler;

public:
  TransferFunctions(CFGBlockValues &vals, const CFG &cfg,
                    const CFGBlock *block, AnalysisDeclContext &ac,
                    const ClassifyRefs &classification,
                    UninitVariablesHandler &handler)
      : vals(vals), cfg(cfg), block(block), ac(ac),
        classification(classification), objCNoRet(ac.getASTContext()),
        handler(handler) {}

  void reportUse(const Expr *ex, const VarDecl *vd);
  void reportConstRefUse(const Expr *ex, const VarDecl *vd);

  void VisitBinaryOperator(BinaryOperator *bo);
  void VisitBlockExpr(BlockExpr *be);
  void VisitCallExpr(CallExpr *ce);
  void VisitDeclRefExpr(DeclRefExpr *dr);
  void VisitDeclStmt(DeclStmt *ds);
  void VisitGCCAsmStmt(GCCAsmStmt *as);
  void VisitObjCForCollectionStmt(ObjCForCollectionStmt *FS);
  void VisitObjCMessageExpr(ObjCMessageExpr *ME);
  void VisitOMPExecutableDirective(OMPExecutableDirective *ED);

  bool isTrackedVar(const VarDecl *vd) {
    return ::isTrackedVar(vd, cast<DeclContext>(ac.getDecl()));
  }

  FindVarResult findVar(const Expr *ex) {
    return ::findVar(ex, cast<DeclContext>(ac.getDecl()));
  }

  UninitUse getUninitUse(const Expr *ex, const VarDecl *vd, Value v) {
    UninitUse Use(ex, isAlwaysUninit(v));

    assert(isUninitialized(v));
    if (Use.getKind() == UninitUse::Always)
      return Use;

    // If an edge which leads unconditionally to this use did not initialize
    // the variable, we can say something stronger than 'may be uninitialized':
    // we can say 'either it's used uninitialized or you have dead code'.
    //
    // We track the number of successors of a node which have been visited, and
    // visit a node once we have visited all of its successors. Only edges where
    // the variable might still be uninitialized are followed. Since a variable
    // can't transfer from being initialized to being uninitialized, this will
    // trace out the subgraph which inevitably leads to the use and does not
    // initialize the variable. We do not want to skip past loops, since their
    // non-termination might be correlated with the initialization condition.
    //
    // For example:
    //
    // void f(bool a, bool b) {
    // block1:   int n;
    //           if (a) {
    // block2:     if (b)
    // block3:       n = 1;
    // block4:   } else if (b) {
    // block5:     while (!a) {
    // block6:       do_work(&a);
    //               n = 2;
    //             }
    //           }
    // block7:   if (a)
    // block8:     g();
    // block9:   return n;
    // }
    //
    // Starting from the maybe-uninitialized use in block 9:
    //  * Block 7 is not visited because we have only visited one of its two
    //    successors.
    //  * Block 8 is visited because we've visited its only successor.
    // From block 8:
    //  * Block 7 is visited because we've now visited both of its successors.
    // From block 7:
    //  * Blocks 1, 2, 4, 5, and 6 are not visited because we didn't visit all
    //    of their successors (we didn't visit 4, 3, 5, 6, and 5, respectively).
    //  * Block 3 is not visited because it initializes 'n'.
    // Now the algorithm terminates, having visited blocks 7 and 8, and having
    // found the frontier is blocks 2, 4, and 5.
    //
    // 'n' is definitely uninitialized for two edges into block 7 (from blocks 2
    // and 4), so we report that any time either of those edges is taken (in
    // each case when 'b == false'), 'n' is used uninitialized.
    SmallVector<const CFGBlock*, 32> Queue;
    SmallVector<unsigned, 32> SuccsVisited(cfg.getNumBlockIDs(), 0);
    Queue.push_back(block);
    // Specify that we've already visited all successors of the starting block.
    // This has the dual purpose of ensuring we never add it to the queue, and
    // of marking it as not being a candidate element of the frontier.
    SuccsVisited[block->getBlockID()] = block->succ_size();
    while (!Queue.empty()) {
      const CFGBlock *B = Queue.pop_back_val();

      // If the use is always reached from the entry block, make a note of that.
      if (B == &cfg.getEntry())
        Use.setUninitAfterCall();

      for (CFGBlock::const_pred_iterator I = B->pred_begin(), E = B->pred_end();
           I != E; ++I) {
        const CFGBlock *Pred = *I;
        if (!Pred)
          continue;

        Value AtPredExit = vals.getValue(Pred, B, vd);
        if (AtPredExit == Initialized)
          // This block initializes the variable.
          continue;
        if (AtPredExit == MayUninitialized &&
            vals.getValue(B, nullptr, vd) == Uninitialized) {
          // This block declares the variable (uninitialized), and is reachable
          // from a block that initializes the variable. We can't guarantee to
          // give an earlier location for the diagnostic (and it appears that
          // this code is intended to be reachable) so give a diagnostic here
          // and go no further down this path.
          Use.setUninitAfterDecl();
          continue;
        }

        unsigned &SV = SuccsVisited[Pred->getBlockID()];
        if (!SV) {
          // When visiting the first successor of a block, mark all NULL
          // successors as having been visited.
          for (CFGBlock::const_succ_iterator SI = Pred->succ_begin(),
                                             SE = Pred->succ_end();
               SI != SE; ++SI)
            if (!*SI)
              ++SV;
        }

        if (++SV == Pred->succ_size())
          // All paths from this block lead to the use and don't initialize the
          // variable.
          Queue.push_back(Pred);
      }
    }

    // Scan the frontier, looking for blocks where the variable was
    // uninitialized.
    for (const auto *Block : cfg) {
      unsigned BlockID = Block->getBlockID();
      const Stmt *Term = Block->getTerminatorStmt();
      if (SuccsVisited[BlockID] && SuccsVisited[BlockID] < Block->succ_size() &&
          Term) {
        // This block inevitably leads to the use. If we have an edge from here
        // to a post-dominator block, and the variable is uninitialized on that
        // edge, we have found a bug.
        for (CFGBlock::const_succ_iterator I = Block->succ_begin(),
             E = Block->succ_end(); I != E; ++I) {
          const CFGBlock *Succ = *I;
          if (Succ && SuccsVisited[Succ->getBlockID()] >= Succ->succ_size() &&
              vals.getValue(Block, Succ, vd) == Uninitialized) {
            // Switch cases are a special case: report the label to the caller
            // as the 'terminator', not the switch statement itself. Suppress
            // situations where no label matched: we can't be sure that's
            // possible.
            if (isa<SwitchStmt>(Term)) {
              const Stmt *Label = Succ->getLabel();
              if (!Label || !isa<SwitchCase>(Label))
                // Might not be possible.
                continue;
              UninitUse::Branch Branch;
              Branch.Terminator = Label;
              Branch.Output = 0; // Ignored.
              Use.addUninitBranch(Branch);
            } else {
              UninitUse::Branch Branch;
              Branch.Terminator = Term;
              Branch.Output = I - Block->succ_begin();
              Use.addUninitBranch(Branch);
            }
          }
        }
      }
    }

    return Use;
  }
};

} // namespace

void TransferFunctions::reportUse(const Expr *ex, const VarDecl *vd) {
  Value v = vals[vd];
  if (isUninitialized(v))
    handler.handleUseOfUninitVariable(vd, getUninitUse(ex, vd, v));
}

void TransferFunctions::reportConstRefUse(const Expr *ex, const VarDecl *vd) {
  Value v = vals[vd];
  if (isAlwaysUninit(v))
    handler.handleConstRefUseOfUninitVariable(vd, getUninitUse(ex, vd, v));
}

void TransferFunctions::VisitObjCForCollectionStmt(ObjCForCollectionStmt *FS) {
  // This represents an initialization of the 'element' value.
  if (const auto *DS = dyn_cast<DeclStmt>(FS->getElement())) {
    const auto *VD = cast<VarDecl>(DS->getSingleDecl());
    if (isTrackedVar(VD))
      vals[VD] = Initialized;
  }
}

void TransferFunctions::VisitOMPExecutableDirective(
    OMPExecutableDirective *ED) {
  for (Stmt *S : OMPExecutableDirective::used_clauses_children(ED->clauses())) {
    assert(S && "Expected non-null used-in-clause child.");
    Visit(S);
  }
  if (!ED->isStandaloneDirective())
    Visit(ED->getStructuredBlock());
}

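// A block literal reads its by-copy captures when the block is formed, so
// each such capture is reported as a use here; __block (by-reference)
// captures are instead marked as initialized.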
void TransferFunctions::VisitBlockExpr(BlockExpr *be) {
  const BlockDecl *bd = be->getBlockDecl();
  for (const auto &I : bd->captures()) {
    const VarDecl *vd = I.getVariable();
    if (!isTrackedVar(vd))
      continue;
    if (I.isByRef()) {
      vals[vd] = Initialized;
      continue;
    }
    reportUse(be, vd);
  }
}

void TransferFunctions::VisitCallExpr(CallExpr *ce) {
  if (Decl *Callee = ce->getCalleeDecl()) {
    if (Callee->hasAttr<ReturnsTwiceAttr>()) {
      // After a call to a function like setjmp or vfork, any variable which is
      // initialized anywhere within this function may now be initialized. For
      // now, just assume such a call initializes all variables. FIXME: Only
      // mark variables as initialized if they have an initializer which is
      // reachable from here.
      vals.setAllScratchValues(Initialized);
    } else if (Callee->hasAttr<AnalyzerNoReturnAttr>()) {
      // Functions labeled like "analyzer_noreturn" are often used to denote
      // "panic" functions that in special debug situations can still return,
      // but for the most part should not be treated as returning. This is a
      // useful annotation borrowed from the static analyzer that is useful for
      // suppressing branch-specific false positives when we call one of these
      // functions but keep pretending the path continues (when in reality the
      // user doesn't care).
      vals.setAllScratchValues(Unknown);
    }
  }
}

void TransferFunctions::VisitDeclRefExpr(DeclRefExpr *dr) {
  switch (classification.get(dr)) {
  case ClassifyRefs::Ignore:
    break;
  case ClassifyRefs::Use:
    reportUse(dr, cast<VarDecl>(dr->getDecl()));
    break;
  case ClassifyRefs::Init:
    vals[cast<VarDecl>(dr->getDecl())] = Initialized;
    break;
  case ClassifyRefs::SelfInit:
    handler.handleSelfInit(cast<VarDecl>(dr->getDecl()));
    break;
  case ClassifyRefs::ConstRefUse:
    reportConstRefUse(dr, cast<VarDecl>(dr->getDecl()));
    break;
  }
}

void TransferFunctions::VisitBinaryOperator(BinaryOperator *BO) {
  if (BO->getOpcode() == BO_Assign) {
    FindVarResult Var = findVar(BO->getLHS());
    if (const VarDecl *VD = Var.getDecl())
      vals[VD] = Initialized;
  }
}

void TransferFunctions::VisitDeclStmt(DeclStmt *DS) {
  for (auto *DI : DS->decls()) {
    auto *VD = dyn_cast<VarDecl>(DI);
    if (VD && isTrackedVar(VD)) {
      if (getSelfInitExpr(VD)) {
        // If the initializer consists solely of a reference to itself, we
        // explicitly mark the variable as uninitialized. This allows code
        // like the following:
        //
        //   int x = x;
        //
        // to deliberately leave a variable uninitialized. Different analysis
        // clients can detect this pattern and adjust their reporting
        // appropriately, but we need to continue to analyze subsequent uses
        // of the variable.
        vals[VD] = Uninitialized;
      } else if (VD->getInit()) {
        // Treat the new variable as initialized.
        vals[VD] = Initialized;
      } else {
        // No initializer: the variable is now uninitialized. This matters
        // for cases like:
        //   while (...) {
        //     int n;
        //     use(n);
        //     n = 0;
        //   }
        // FIXME: Mark the variable as uninitialized whenever its scope is
        // left, since its scope could be re-entered by a jump over the
        // declaration.
        vals[VD] = Uninitialized;
      }
    }
  }
}

void TransferFunctions::VisitGCCAsmStmt(GCCAsmStmt *as) {
  // An "asm goto" statement is a terminator that may initialize some variables.
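  // For example, in 'asm goto("..." : "=r"(x) ::: indirect);' the output 'x'
  // might not have been written on the path that branches to 'indirect'.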
  if (!as->isAsmGoto())
    return;

  ASTContext &C = ac.getASTContext();
  for (const Expr *O : as->outputs()) {
    const Expr *Ex = stripCasts(C, O);

    // Strip away any unary operators. Invalid l-values are reported by other
    // semantic analysis passes.
    while (const auto *UO = dyn_cast<UnaryOperator>(Ex))
      Ex = stripCasts(C, UO->getSubExpr());

    // Mark the variable as potentially uninitialized for those cases where
    // it's used on an indirect path, where it's not guaranteed to be
    // defined.
    if (const VarDecl *VD = findVar(Ex).getDecl())
      if (vals[VD] != Initialized)
        vals[VD] = MayUninitialized;
  }
}

void TransferFunctions::VisitObjCMessageExpr(ObjCMessageExpr *ME) {
  // If the Objective-C message expression is an implicit no-return that
  // is not modeled in the CFG, set the tracked dataflow values to Unknown.
  if (objCNoRet.isImplicitNoReturn(ME)) {
    vals.setAllScratchValues(Unknown);
  }
}

//------------------------------------------------------------------------====//
// High-level "driver" logic for uninitialized values analysis.
//====------------------------------------------------------------------------//

static bool runOnBlock(const CFGBlock *block, const CFG &cfg,
                       AnalysisDeclContext &ac, CFGBlockValues &vals,
                       const ClassifyRefs &classification,
                       llvm::BitVector &wasAnalyzed,
                       UninitVariablesHandler &handler) {
  wasAnalyzed[block->getBlockID()] = true;
  vals.resetScratch();
  // Merge in values of predecessor blocks.
  bool isFirst = true;
  for (CFGBlock::const_pred_iterator I = block->pred_begin(),
       E = block->pred_end(); I != E; ++I) {
    const CFGBlock *pred = *I;
    if (!pred)
      continue;
    if (wasAnalyzed[pred->getBlockID()]) {
      vals.mergeIntoScratch(vals.getValueVector(pred), isFirst);
      isFirst = false;
    }
  }
  // Apply the transfer function.
  TransferFunctions tf(vals, cfg, block, ac, classification, handler);
  for (const auto &I : *block) {
    if (std::optional<CFGStmt> cs = I.getAs<CFGStmt>())
      tf.Visit(const_cast<Stmt *>(cs->getStmt()));
  }
  CFGTerminator terminator = block->getTerminator();
  if (auto *as = dyn_cast_or_null<GCCAsmStmt>(terminator.getStmt()))
    if (as->isAsmGoto())
      tf.Visit(as);
  return vals.updateValueVectorWithScratch(block);
}

namespace {

/// PruneBlocksHandler is a special UninitVariablesHandler that is used
/// to detect when a CFGBlock has any *potential* use of an uninitialized
/// variable. It is mainly used to prune out work during the final
/// reporting pass.
struct PruneBlocksHandler : public UninitVariablesHandler {
  /// Records if a CFGBlock had a potential use of an uninitialized variable.
  llvm::BitVector hadUse;

  /// Records if any CFGBlock had a potential use of an uninitialized variable.
  bool hadAnyUse = false;

  /// The current block to scribble use information.
  unsigned currentBlock = 0;

  PruneBlocksHandler(unsigned numBlocks) : hadUse(numBlocks, false) {}

  ~PruneBlocksHandler() override = default;

  void handleUseOfUninitVariable(const VarDecl *vd,
                                 const UninitUse &use) override {
    hadUse[currentBlock] = true;
    hadAnyUse = true;
  }

  void handleConstRefUseOfUninitVariable(const VarDecl *vd,
                                         const UninitUse &use) override {
    hadUse[currentBlock] = true;
    hadAnyUse = true;
  }

  /// Called when the uninitialized variable analysis detects the
  /// idiom 'int x = x'. All other uses of 'x' within the initializer
  /// are handled by handleUseOfUninitVariable.
  void handleSelfInit(const VarDecl *vd) override {
    hadUse[currentBlock] = true;
    hadAnyUse = true;
  }
};

} // namespace

void clang::runUninitializedVariablesAnalysis(
    const DeclContext &dc,
    const CFG &cfg,
    AnalysisDeclContext &ac,
    UninitVariablesHandler &handler,
    UninitVariablesAnalysisStats &stats) {
  CFGBlockValues vals(cfg);
  vals.computeSetOfDeclarations(dc);
  if (vals.hasNoDeclarations())
    return;

  stats.NumVariablesAnalyzed = vals.getNumEntries();

  // Precompute which expressions are uses and which are initializations.
  ClassifyRefs classification(ac);
  cfg.VisitBlockStmts(classification);

  // Mark all variables uninitialized at the entry.
  const CFGBlock &entry = cfg.getEntry();
  ValueVector &vec = vals.getValueVector(&entry);
  const unsigned n = vals.getNumEntries();
  for (unsigned j = 0; j < n; ++j) {
    vec[j] = Uninitialized;
  }

  // Proceed with the worklist.
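  // Iterate to a fixpoint: a block's successors are re-enqueued whenever the
  // block's exit value vector changes (or the first time it is visited), so
  // loops are re-analyzed until the merged values stabilize.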
  ForwardDataflowWorklist worklist(cfg, ac);
  llvm::BitVector previouslyVisited(cfg.getNumBlockIDs());
  worklist.enqueueSuccessors(&cfg.getEntry());
  llvm::BitVector wasAnalyzed(cfg.getNumBlockIDs(), false);
  wasAnalyzed[cfg.getEntry().getBlockID()] = true;
  PruneBlocksHandler PBH(cfg.getNumBlockIDs());

  while (const CFGBlock *block = worklist.dequeue()) {
    PBH.currentBlock = block->getBlockID();

    // Did the block change?
    bool changed = runOnBlock(block, cfg, ac, vals,
                              classification, wasAnalyzed, PBH);
    ++stats.NumBlockVisits;
    if (changed || !previouslyVisited[block->getBlockID()])
      worklist.enqueueSuccessors(block);
    previouslyVisited[block->getBlockID()] = true;
  }

  if (!PBH.hadAnyUse)
    return;

  // Run through the blocks one more time, and report uninitialized variables.
  for (const auto *block : cfg)
    if (PBH.hadUse[block->getBlockID()]) {
      runOnBlock(block, cfg, ac, vals, classification, wasAnalyzed, handler);
      ++stats.NumBlockVisits;
    }
}

UninitVariablesHandler::~UninitVariablesHandler() = default;