//===- UninitializedValues.cpp - Find Uninitialized Values ----------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This file implements uninitialized values analysis for source-level CFGs.
//
//===----------------------------------------------------------------------===//

#include "clang/Analysis/Analyses/UninitializedValues.h"
#include "clang/AST/Attr.h"
#include "clang/AST/Decl.h"
#include "clang/AST/DeclBase.h"
#include "clang/AST/Expr.h"
#include "clang/AST/OperationKinds.h"
#include "clang/AST/Stmt.h"
#include "clang/AST/StmtObjC.h"
#include "clang/AST/StmtVisitor.h"
#include "clang/AST/Type.h"
#include "clang/Analysis/AnalysisDeclContext.h"
#include "clang/Analysis/CFG.h"
#include "clang/Analysis/DomainSpecific/ObjCNoReturn.h"
#include "clang/Analysis/FlowSensitive/DataflowWorklist.h"
#include "clang/Basic/LLVM.h"
#include "llvm/ADT/BitVector.h"
#include "llvm/ADT/DenseMap.h"
#include "llvm/ADT/PackedVector.h"
#include "llvm/ADT/SmallBitVector.h"
#include "llvm/ADT/SmallVector.h"
#include <algorithm>
#include <cassert>
#include <optional>

using namespace clang;

#define DEBUG_LOGGING 0

static bool recordIsNotEmpty(const RecordDecl *RD) {
  // We consider a record decl to be empty if it contains only unnamed bit-
  // fields, zero-width fields, and fields of empty record type.
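  // For example, 'struct S { int : 0; };' is empty under this definition
  // (its only field is an unnamed zero-width bit-field), while
  // 'struct T { int x; };' is not.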
  for (const auto *FD : RD->fields()) {
    if (FD->isUnnamedBitField())
      continue;
    if (FD->isZeroSize(FD->getASTContext()))
      continue;
    // The only case remaining to check is for a field declaration of record
    // type and whether that record itself is empty.
    if (const auto *FieldRD = FD->getType()->getAsRecordDecl();
        !FieldRD || recordIsNotEmpty(FieldRD))
      return true;
  }
  return false;
}

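// A variable is tracked only if it is a non-static local declared directly
// in the context being analyzed (exception variables, init-captures, and
// implicit variables are excluded) and its type is scalar, vector, RVV
// sizeless builtin, or non-empty record. For example, 'int x;' in a function
// body is tracked; 'static int x;' is not, since static locals have global
// storage.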
static bool isTrackedVar(const VarDecl *vd, const DeclContext *dc) {
  if (vd->isLocalVarDecl() && !vd->hasGlobalStorage() &&
      !vd->isExceptionVariable() && !vd->isInitCapture() && !vd->isImplicit() &&
      vd->getDeclContext() == dc) {
    QualType ty = vd->getType();
    if (const auto *RD = ty->getAsRecordDecl())
      return recordIsNotEmpty(RD);
    return ty->isScalarType() || ty->isVectorType() ||
           ty->isRVVSizelessBuiltinType();
  }
  return false;
}

//------------------------------------------------------------------------====//
// DeclToIndex: a mapping from Decls we track to value indices.
//====------------------------------------------------------------------------//

namespace {

class DeclToIndex {
  llvm::DenseMap<const VarDecl *, unsigned> map;

public:
  DeclToIndex() = default;

  /// Compute the actual mapping from declarations to bits.
  void computeMap(const DeclContext &dc);

  /// Return the number of declarations in the map.
  unsigned size() const { return map.size(); }

  /// Returns the bit vector index for a given declaration.
  std::optional<unsigned> getValueIndex(const VarDecl *d) const;
};

} // namespace

void DeclToIndex::computeMap(const DeclContext &dc) {
  unsigned count = 0;
  DeclContext::specific_decl_iterator<VarDecl> I(dc.decls_begin()),
                                               E(dc.decls_end());
  for ( ; I != E; ++I) {
    const VarDecl *vd = *I;
    if (isTrackedVar(vd, &dc))
      map[vd] = count++;
  }
}

std::optional<unsigned> DeclToIndex::getValueIndex(const VarDecl *d) const {
  llvm::DenseMap<const VarDecl *, unsigned>::const_iterator I = map.find(d);
  if (I == map.end())
    return std::nullopt;
  return I->second;
}

//------------------------------------------------------------------------====//
// CFGBlockValues: dataflow values for CFG blocks.
//====------------------------------------------------------------------------//

// These values are defined in such a way that a merge can be done using
// a bitwise OR.
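// For example, merging Initialized (01) with Uninitialized (10) across two
// incoming CFG edges yields MayUninitialized (11).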
enum Value { Unknown = 0x0,         /* 00 */
             Initialized = 0x1,     /* 01 */
             Uninitialized = 0x2,   /* 10 */
             MayUninitialized = 0x3 /* 11 */ };

static bool isUninitialized(const Value v) {
  return v >= Uninitialized;
}

static bool isAlwaysUninit(const Value v) {
  return v == Uninitialized;
}

namespace {

using ValueVector = llvm::PackedVector<Value, 2, llvm::SmallBitVector>;
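// Each tracked variable occupies two bits per CFG block; its bit-pair index
// is the one assigned by DeclToIndex.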

class CFGBlockValues {
  const CFG &cfg;
  SmallVector<ValueVector, 8> vals;
  ValueVector scratch;
  DeclToIndex declToIndex;

public:
  CFGBlockValues(const CFG &cfg);

  unsigned getNumEntries() const { return declToIndex.size(); }

  void computeSetOfDeclarations(const DeclContext &dc);

  ValueVector &getValueVector(const CFGBlock *block) {
    return vals[block->getBlockID()];
  }

  void setAllScratchValues(Value V);
  void mergeIntoScratch(ValueVector const &source, bool isFirst);
  bool updateValueVectorWithScratch(const CFGBlock *block);

  bool hasNoDeclarations() const {
    return declToIndex.size() == 0;
  }

  void resetScratch();

  ValueVector::reference operator[](const VarDecl *vd);

  Value getValue(const CFGBlock *block, const VarDecl *vd) {
    std::optional<unsigned> idx = declToIndex.getValueIndex(vd);
    return getValueVector(block)[*idx];
  }
};

} // namespace

CFGBlockValues::CFGBlockValues(const CFG &c) : cfg(c), vals(0) {}

void CFGBlockValues::computeSetOfDeclarations(const DeclContext &dc) {
  declToIndex.computeMap(dc);
  unsigned decls = declToIndex.size();
  scratch.resize(decls);
  unsigned n = cfg.getNumBlockIDs();
  if (!n)
    return;
  vals.resize(n);
  for (auto &val : vals)
    val.resize(decls);
}

#if DEBUG_LOGGING
static void printVector(const CFGBlock *block, ValueVector &bv,
                        unsigned num) {
  llvm::errs() << block->getBlockID() << " :";
  for (const auto &i : bv)
    llvm::errs() << ' ' << i;
  llvm::errs() << " : " << num << '\n';
}
#endif

void CFGBlockValues::setAllScratchValues(Value V) {
  for (unsigned I = 0, E = scratch.size(); I != E; ++I)
    scratch[I] = V;
}

void CFGBlockValues::mergeIntoScratch(ValueVector const &source,
                                      bool isFirst) {
  if (isFirst)
    scratch = source;
  else
    scratch |= source;
}

bool CFGBlockValues::updateValueVectorWithScratch(const CFGBlock *block) {
  ValueVector &dst = getValueVector(block);
  bool changed = (dst != scratch);
  if (changed)
    dst = scratch;
#if DEBUG_LOGGING
  printVector(block, scratch, 0);
#endif
  return changed;
}

void CFGBlockValues::resetScratch() {
  scratch.reset();
}

ValueVector::reference CFGBlockValues::operator[](const VarDecl *vd) {
  return scratch[*declToIndex.getValueIndex(vd)];
}

//------------------------------------------------------------------------====//
// Classification of DeclRefExprs as use or initialization.
//====------------------------------------------------------------------------//

namespace {

class FindVarResult {
  const VarDecl *vd;
  const DeclRefExpr *dr;

public:
  FindVarResult(const VarDecl *vd, const DeclRefExpr *dr) : vd(vd), dr(dr) {}

  const DeclRefExpr *getDeclRefExpr() const { return dr; }
  const VarDecl *getDecl() const { return vd; }
};

} // namespace

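// Peel off parentheses, no-op casts, and lvalue bitcasts so that a wrapped
// variable reference is exposed; e.g. for 'int i;', the expression
// '((float &)i)' is reduced to the DeclRefExpr for 'i'.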
static const Expr *stripCasts(ASTContext &C, const Expr *Ex) {
  while (Ex) {
    Ex = Ex->IgnoreParenNoopCasts(C);
    if (const auto *CE = dyn_cast<CastExpr>(Ex)) {
      if (CE->getCastKind() == CK_LValueBitCast) {
        Ex = CE->getSubExpr();
        continue;
      }
    }
    break;
  }
  return Ex;
}

/// If E is an expression comprising a reference to a single variable, find that
/// variable.
static FindVarResult findVar(const Expr *E, const DeclContext *DC) {
  if (const auto *DRE =
          dyn_cast<DeclRefExpr>(stripCasts(DC->getParentASTContext(), E)))
    if (const auto *VD = dyn_cast<VarDecl>(DRE->getDecl()))
      if (isTrackedVar(VD, DC))
        return FindVarResult(VD, DRE);
  return FindVarResult(nullptr, nullptr);
}

namespace {

/// Classify each DeclRefExpr as an initialization or a use. Any
/// DeclRefExpr which isn't explicitly classified will be assumed to have
/// escaped the analysis and will be treated as an initialization.
class ClassifyRefs : public ConstStmtVisitor<ClassifyRefs> {
public:
  enum Class { Init, Use, SelfInit, ConstRefUse, ConstPtrUse, Ignore };

private:
  const DeclContext *DC;
  llvm::DenseMap<const DeclRefExpr *, Class> Classification;

  bool isTrackedVar(const VarDecl *VD) const {
    return ::isTrackedVar(VD, DC);
  }

  void classify(const Expr *E, Class C);

public:
  ClassifyRefs(AnalysisDeclContext &AC) : DC(cast<DeclContext>(AC.getDecl())) {}

  void VisitDeclStmt(const DeclStmt *DS);
  void VisitUnaryOperator(const UnaryOperator *UO);
  void VisitBinaryOperator(const BinaryOperator *BO);
  void VisitCallExpr(const CallExpr *CE);
  void VisitCastExpr(const CastExpr *CE);
  void VisitOMPExecutableDirective(const OMPExecutableDirective *ED);

  void operator()(const Stmt *S) { Visit(S); }

  Class get(const DeclRefExpr *DRE) const {
    llvm::DenseMap<const DeclRefExpr*, Class>::const_iterator I
        = Classification.find(DRE);
    if (I != Classification.end())
      return I->second;

    const auto *VD = dyn_cast<VarDecl>(DRE->getDecl());
    if (!VD || !isTrackedVar(VD))
      return Ignore;

    return Init;
  }
};

} // namespace

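// Detect the self-initialization idiom 'int x = x;': the initializer, once
// casts are stripped, is a reference to the declared variable itself.
// Variables of record type are excluded here.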
static const DeclRefExpr *getSelfInitExpr(const VarDecl *VD) {
  if (VD->getType()->isRecordType())
    return nullptr;
  if (const Expr *Init = VD->getInit()) {
    const auto *DRE =
        dyn_cast<DeclRefExpr>(stripCasts(VD->getASTContext(), Init));
    if (DRE && DRE->getDecl() == VD)
      return DRE;
  }
  return nullptr;
}

void ClassifyRefs::classify(const Expr *E, Class C) {
  // The result of a ?: could also be an lvalue.
  E = E->IgnoreParens();
  if (const auto *CO = dyn_cast<ConditionalOperator>(E)) {
    classify(CO->getTrueExpr(), C);
    classify(CO->getFalseExpr(), C);
    return;
  }

  if (const auto *BCO = dyn_cast<BinaryConditionalOperator>(E)) {
    classify(BCO->getFalseExpr(), C);
    return;
  }

  if (const auto *OVE = dyn_cast<OpaqueValueExpr>(E)) {
    classify(OVE->getSourceExpr(), C);
    return;
  }

  if (const auto *ME = dyn_cast<MemberExpr>(E)) {
    if (const auto *VD = dyn_cast<VarDecl>(ME->getMemberDecl())) {
      if (!VD->isStaticDataMember())
        classify(ME->getBase(), C);
    }
    return;
  }

  if (const auto *BO = dyn_cast<BinaryOperator>(E)) {
    switch (BO->getOpcode()) {
    case BO_PtrMemD:
    case BO_PtrMemI:
      classify(BO->getLHS(), C);
      return;
    case BO_Comma:
      classify(BO->getRHS(), C);
      return;
    default:
      return;
    }
  }

  FindVarResult Var = findVar(E, DC);
  if (const DeclRefExpr *DRE = Var.getDeclRefExpr()) {
    auto &Class = Classification[DRE];
    Class = std::max(Class, C);
  }
}

void ClassifyRefs::VisitDeclStmt(const DeclStmt *DS) {
  for (auto *DI : DS->decls()) {
    auto *VD = dyn_cast<VarDecl>(DI);
    if (VD && isTrackedVar(VD))
      if (const DeclRefExpr *DRE = getSelfInitExpr(VD))
        Classification[DRE] = SelfInit;
  }
}

void ClassifyRefs::VisitBinaryOperator(const BinaryOperator *BO) {
  // Ignore the evaluation of a DeclRefExpr on the LHS of an assignment. If this
  // is not a compound-assignment, we will treat it as initializing the variable
  // when TransferFunctions visits it. A compound-assignment does not affect
  // whether a variable is uninitialized, and there's no point counting it as a
  // use.
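  // For example, in 'x += 1' the LHS 'x' is classified as a use, while in
  // 'x = 1' the evaluation of 'x' itself is ignored.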
  if (BO->isCompoundAssignmentOp())
    classify(BO->getLHS(), Use);
  else if (BO->getOpcode() == BO_Assign || BO->getOpcode() == BO_Comma)
    classify(BO->getLHS(), Ignore);
}

void ClassifyRefs::VisitUnaryOperator(const UnaryOperator *UO) {
  // Increment and decrement are uses despite there being no lvalue-to-rvalue
  // conversion.
  if (UO->isIncrementDecrementOp())
    classify(UO->getSubExpr(), Use);
}

void ClassifyRefs::VisitOMPExecutableDirective(
    const OMPExecutableDirective *ED) {
  for (Stmt *S : OMPExecutableDirective::used_clauses_children(ED->clauses()))
    classify(cast<Expr>(S), Use);
}

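// Returns true for pointer-to-const types such as 'const int *'.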
static bool isPointerToConst(const QualType &QT) {
  return QT->isAnyPointerType() && QT->getPointeeType().isConstQualified();
}

static bool hasTrivialBody(const CallExpr *CE) {
  if (const FunctionDecl *FD = CE->getDirectCallee()) {
    if (const FunctionTemplateDecl *FTD = FD->getPrimaryTemplate())
      return FTD->getTemplatedDecl()->hasTrivialBody();
    return FD->hasTrivialBody();
  }
  return false;
}

void ClassifyRefs::VisitCallExpr(const CallExpr *CE) {
  // Classify arguments to std::move as used.
  if (CE->isCallToStdMove()) {
    // RecordTypes are handled in SemaDeclCXX.cpp.
    if (!CE->getArg(0)->getType()->isRecordType())
      classify(CE->getArg(0), Use);
    return;
  }
  bool isTrivialBody = hasTrivialBody(CE);
  // If a value is passed by const pointer to a function,
  // we should not assume that it is initialized by the call, and we
  // conservatively do not assume that it is used.
  // If a value is passed by const reference to a function,
  // it should already be initialized.
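  // For example, given 'void f(const int &); int x; f(x);', the argument 'x'
  // is classified as ConstRefUse (unless f's body is trivial), and a warning
  // is emitted later only if 'x' is definitely uninitialized at the call.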
  for (const Expr *Argument : CE->arguments()) {
    if (Argument->isGLValue()) {
      if (Argument->getType().isConstQualified())
        classify(Argument, isTrivialBody ? Ignore : ConstRefUse);
    } else if (isPointerToConst(Argument->getType())) {
      const Expr *Ex = stripCasts(DC->getParentASTContext(), Argument);
      const auto *UO = dyn_cast<UnaryOperator>(Ex);
      if (UO && UO->getOpcode() == UO_AddrOf)
        classify(UO->getSubExpr(), isTrivialBody ? Ignore : ConstPtrUse);
    }
  }
}

void ClassifyRefs::VisitCastExpr(const CastExpr *CE) {
  if (CE->getCastKind() == CK_LValueToRValue)
    classify(CE->getSubExpr(), Use);
  else if (const auto *CSE = dyn_cast<CStyleCastExpr>(CE)) {
    if (CSE->getType()->isVoidType()) {
      // Squelch any detected load of an uninitialized value if
      // we cast it to void.
      // e.g. (void) x;
      classify(CSE->getSubExpr(), Ignore);
    }
  }
}

//------------------------------------------------------------------------====//
// Transfer function for uninitialized values analysis.
//====------------------------------------------------------------------------//

namespace {

class TransferFunctions : public ConstStmtVisitor<TransferFunctions> {
  CFGBlockValues &vals;
  const CFG &cfg;
  const CFGBlock *block;
  AnalysisDeclContext &ac;
  const ClassifyRefs &classification;
  ObjCNoReturn objCNoRet;
  UninitVariablesHandler &handler;

public:
  TransferFunctions(CFGBlockValues &vals, const CFG &cfg,
                    const CFGBlock *block, AnalysisDeclContext &ac,
                    const ClassifyRefs &classification,
                    UninitVariablesHandler &handler)
      : vals(vals), cfg(cfg), block(block), ac(ac),
        classification(classification), objCNoRet(ac.getASTContext()),
        handler(handler) {}

  void reportUse(const Expr *ex, const VarDecl *vd);
  void reportConstRefUse(const Expr *ex, const VarDecl *vd);
  void reportConstPtrUse(const Expr *ex, const VarDecl *vd);

  void VisitBinaryOperator(const BinaryOperator *bo);
  void VisitBlockExpr(const BlockExpr *be);
  void VisitCallExpr(const CallExpr *ce);
  void VisitDeclRefExpr(const DeclRefExpr *dr);
  void VisitDeclStmt(const DeclStmt *ds);
  void VisitGCCAsmStmt(const GCCAsmStmt *as);
  void VisitObjCForCollectionStmt(const ObjCForCollectionStmt *FS);
  void VisitObjCMessageExpr(const ObjCMessageExpr *ME);
  void VisitOMPExecutableDirective(const OMPExecutableDirective *ED);

  bool isTrackedVar(const VarDecl *vd) {
    return ::isTrackedVar(vd, cast<DeclContext>(ac.getDecl()));
  }

  FindVarResult findVar(const Expr *ex) {
    return ::findVar(ex, cast<DeclContext>(ac.getDecl()));
  }

  UninitUse getUninitUse(const Expr *ex, const VarDecl *vd, Value v) {
    UninitUse Use(ex, isAlwaysUninit(v));

    assert(isUninitialized(v));
    if (Use.getKind() == UninitUse::Always)
      return Use;

    // If an edge which leads unconditionally to this use did not initialize
    // the variable, we can say something stronger than 'may be uninitialized':
    // we can say 'either it's used uninitialized or you have dead code'.
    //
    // We track the number of successors of a node which have been visited, and
    // visit a node once we have visited all of its successors. Only edges where
    // the variable might still be uninitialized are followed. Since a variable
    // can't transfer from being initialized to being uninitialized, this will
    // trace out the subgraph which inevitably leads to the use and does not
    // initialize the variable. We do not want to skip past loops, since their
    // non-termination might be correlated with the initialization condition.
    //
    // For example:
    //
    //         void f(bool a, bool b) {
    // block1:   int n;
    //           if (a) {
    // block2:     if (b)
    // block3:       n = 1;
    // block4:   } else if (b) {
    // block5:     while (!a) {
    // block6:       do_work(&a);
    //               n = 2;
    //             }
    //           }
    // block7:   if (a)
    // block8:     g();
    // block9:   return n;
    //         }
    //
    // Starting from the maybe-uninitialized use in block 9:
    //  * Block 7 is not visited because we have only visited one of its two
    //    successors.
    //  * Block 8 is visited because we've visited its only successor.
    // From block 8:
    //  * Block 7 is visited because we've now visited both of its successors.
    // From block 7:
    //  * Blocks 1, 2, 4, 5, and 6 are not visited because we didn't visit all
    //    of their successors (we didn't visit 4, 3, 5, 6, and 5, respectively).
    //  * Block 3 is not visited because it initializes 'n'.
    // Now the algorithm terminates, having visited blocks 7 and 8, and having
    // found the frontier is blocks 2, 4, and 5.
    //
    // 'n' is definitely uninitialized for two edges into block 7 (from blocks 2
    // and 4), so we report that any time either of those edges is taken (in
    // each case when 'b == false'), 'n' is used uninitialized.
    SmallVector<const CFGBlock*, 32> Queue;
    SmallVector<unsigned, 32> SuccsVisited(cfg.getNumBlockIDs(), 0);
    Queue.push_back(block);
    // Specify that we've already visited all successors of the starting block.
    // This has the dual purpose of ensuring we never add it to the queue, and
    // of marking it as not being a candidate element of the frontier.
    SuccsVisited[block->getBlockID()] = block->succ_size();
    while (!Queue.empty()) {
      const CFGBlock *B = Queue.pop_back_val();

      // If the use is always reached from the entry block, make a note of that.
      if (B == &cfg.getEntry())
        Use.setUninitAfterCall();

      for (CFGBlock::const_pred_iterator I = B->pred_begin(), E = B->pred_end();
           I != E; ++I) {
        const CFGBlock *Pred = *I;
        if (!Pred)
          continue;

        Value AtPredExit = vals.getValue(Pred, vd);
        if (AtPredExit == Initialized)
          // This block initializes the variable.
          continue;
        if (AtPredExit == MayUninitialized &&
            vals.getValue(B, vd) == Uninitialized) {
          // This block declares the variable (uninitialized), and is reachable
          // from a block that initializes the variable. We can't guarantee to
          // give an earlier location for the diagnostic (and it appears that
          // this code is intended to be reachable) so give a diagnostic here
          // and go no further down this path.
          Use.setUninitAfterDecl();
          continue;
        }

        unsigned &SV = SuccsVisited[Pred->getBlockID()];
        if (!SV) {
          // When visiting the first successor of a block, mark all NULL
          // successors as having been visited.
          for (CFGBlock::const_succ_iterator SI = Pred->succ_begin(),
                                             SE = Pred->succ_end();
               SI != SE; ++SI)
            if (!*SI)
              ++SV;
        }

        if (++SV == Pred->succ_size())
          // All paths from this block lead to the use and don't initialize the
          // variable.
          Queue.push_back(Pred);
      }
    }

    // Scan the frontier, looking for blocks where the variable was
    // uninitialized.
    for (const auto *Block : cfg) {
      if (vals.getValue(Block, vd) != Uninitialized)
        continue;
      unsigned BlockID = Block->getBlockID();
      const Stmt *Term = Block->getTerminatorStmt();
      if (SuccsVisited[BlockID] && SuccsVisited[BlockID] < Block->succ_size() &&
          Term) {
        // This block inevitably leads to the use. If we have an edge from here
        // to a post-dominator block, and the variable is uninitialized on that
        // edge, we have found a bug.
        for (CFGBlock::const_succ_iterator I = Block->succ_begin(),
             E = Block->succ_end(); I != E; ++I) {
          const CFGBlock *Succ = *I;
          if (Succ && SuccsVisited[Succ->getBlockID()] >= Succ->succ_size()) {
            // Switch cases are a special case: report the label to the caller
            // as the 'terminator', not the switch statement itself. Suppress
            // situations where no label matched: we can't be sure that's
            // possible.
            if (isa<SwitchStmt>(Term)) {
              const Stmt *Label = Succ->getLabel();
              if (!Label || !isa<SwitchCase>(Label))
                // Might not be possible.
                continue;
              UninitUse::Branch Branch;
              Branch.Terminator = Label;
              Branch.Output = 0; // Ignored.
              Use.addUninitBranch(Branch);
            } else {
              UninitUse::Branch Branch;
              Branch.Terminator = Term;
              Branch.Output = I - Block->succ_begin();
              Use.addUninitBranch(Branch);
            }
          }
        }
      }
    }

    return Use;
  }
};

} // namespace

void TransferFunctions::reportUse(const Expr *ex, const VarDecl *vd) {
  Value v = vals[vd];
  if (isUninitialized(v))
    handler.handleUseOfUninitVariable(vd, getUninitUse(ex, vd, v));
}

void TransferFunctions::reportConstRefUse(const Expr *ex, const VarDecl *vd) {
  Value v = vals[vd];
  if (isAlwaysUninit(v)) {
    auto use = getUninitUse(ex, vd, v);
    use.setConstRefUse();
    handler.handleUseOfUninitVariable(vd, use);
  }
}

void TransferFunctions::reportConstPtrUse(const Expr *ex, const VarDecl *vd) {
  Value v = vals[vd];
  if (isAlwaysUninit(v)) {
    auto use = getUninitUse(ex, vd, v);
    use.setConstPtrUse();
    handler.handleUseOfUninitVariable(vd, use);
  }
}

void TransferFunctions::VisitObjCForCollectionStmt(
    const ObjCForCollectionStmt *FS) {
  // This represents an initialization of the 'element' value.
  if (const auto *DS = dyn_cast<DeclStmt>(FS->getElement())) {
    const auto *VD = cast<VarDecl>(DS->getSingleDecl());
    if (isTrackedVar(VD))
      vals[VD] = Initialized;
  }
}

void TransferFunctions::VisitOMPExecutableDirective(
    const OMPExecutableDirective *ED) {
  for (const Stmt *S :
       OMPExecutableDirective::used_clauses_children(ED->clauses())) {
    assert(S && "Expected non-null used-in-clause child.");
    Visit(S);
  }
  if (!ED->isStandaloneDirective())
    Visit(ED->getStructuredBlock());
}

void TransferFunctions::VisitBlockExpr(const BlockExpr *be) {
  const BlockDecl *bd = be->getBlockDecl();
  for (const auto &I : bd->captures()) {
    const VarDecl *vd = I.getVariable();
    if (!isTrackedVar(vd))
      continue;
    if (I.isByRef()) {
      vals[vd] = Initialized;
      continue;
    }
    reportUse(be, vd);
  }
}

void TransferFunctions::VisitCallExpr(const CallExpr *ce) {
  if (const Decl *Callee = ce->getCalleeDecl()) {
    if (Callee->hasAttr<ReturnsTwiceAttr>()) {
      // After a call to a function like setjmp or vfork, any variable which is
      // initialized anywhere within this function may now be initialized. For
      // now, just assume such a call initializes all variables. FIXME: Only
      // mark variables as initialized if they have an initializer which is
      // reachable from here.
      vals.setAllScratchValues(Initialized);
    } else if (Callee->hasAttr<AnalyzerNoReturnAttr>()) {
      // Functions labeled like "analyzer_noreturn" are often used to denote
      // "panic" functions that in special debug situations can still return,
      // but for the most part should not be treated as returning. This is a
      // useful annotation borrowed from the static analyzer that is useful for
      // suppressing branch-specific false positives when we call one of these
      // functions but keep pretending the path continues (when in reality the
      // user doesn't care).
      vals.setAllScratchValues(Unknown);
    }
  }
}

void TransferFunctions::VisitDeclRefExpr(const DeclRefExpr *dr) {
  switch (classification.get(dr)) {
  case ClassifyRefs::Ignore:
    break;
  case ClassifyRefs::Use:
    reportUse(dr, cast<VarDecl>(dr->getDecl()));
    break;
  case ClassifyRefs::Init:
    vals[cast<VarDecl>(dr->getDecl())] = Initialized;
    break;
  case ClassifyRefs::SelfInit:
    handler.handleSelfInit(cast<VarDecl>(dr->getDecl()));
    break;
  case ClassifyRefs::ConstRefUse:
    reportConstRefUse(dr, cast<VarDecl>(dr->getDecl()));
    break;
  case ClassifyRefs::ConstPtrUse:
    reportConstPtrUse(dr, cast<VarDecl>(dr->getDecl()));
    break;
  }
}

void TransferFunctions::VisitBinaryOperator(const BinaryOperator *BO) {
  if (BO->getOpcode() == BO_Assign) {
    FindVarResult Var = findVar(BO->getLHS());
    if (const VarDecl *VD = Var.getDecl())
      vals[VD] = Initialized;
  }
}

void TransferFunctions::VisitDeclStmt(const DeclStmt *DS) {
  for (const Decl *DI : DS->decls()) {
    const auto *VD = dyn_cast<VarDecl>(DI);
    if (VD && isTrackedVar(VD)) {
      if (getSelfInitExpr(VD)) {
        // If the initializer consists solely of a reference to itself, we
        // explicitly mark the variable as uninitialized. This allows code
        // like the following:
        //
        //   int x = x;
        //
        // to deliberately leave a variable uninitialized. Different analysis
        // clients can detect this pattern and adjust their reporting
        // appropriately, but we need to continue to analyze subsequent uses
        // of the variable.
        vals[VD] = Uninitialized;
      } else if (VD->getInit()) {
        // Treat the new variable as initialized.
        vals[VD] = Initialized;
      } else {
        // No initializer: the variable is now uninitialized. This matters
        // for cases like:
        //   while (...) {
        //     int n;
        //     use(n);
        //     n = 0;
        //   }
        // FIXME: Mark the variable as uninitialized whenever its scope is
        // left, since its scope could be re-entered by a jump over the
        // declaration.
        vals[VD] = Uninitialized;
      }
    }
  }
}

void TransferFunctions::VisitGCCAsmStmt(const GCCAsmStmt *as) {
  // An "asm goto" statement is a terminator that may initialize some variables.
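  // For example, in 'asm goto("..." : "=r"(v) : : : Err);' the output 'v' is
  // only guaranteed to be written on the fallthrough path, not on the branch
  // to 'Err', so 'v' is marked MayUninitialized rather than Initialized.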
  if (!as->isAsmGoto())
    return;

  ASTContext &C = ac.getASTContext();
  for (const Expr *O : as->outputs()) {
    const Expr *Ex = stripCasts(C, O);

    // Strip away any unary operators. Invalid l-values are reported by other
    // semantic analysis passes.
    while (const auto *UO = dyn_cast<UnaryOperator>(Ex))
      Ex = stripCasts(C, UO->getSubExpr());

    // Mark the variable as potentially uninitialized for those cases where
    // it's used on an indirect path, where it's not guaranteed to be
    // defined.
    if (const VarDecl *VD = findVar(Ex).getDecl())
      if (vals[VD] != Initialized)
        vals[VD] = MayUninitialized;
  }
}

void TransferFunctions::VisitObjCMessageExpr(const ObjCMessageExpr *ME) {
  // If the Objective-C message expression is an implicit no-return that
  // is not modeled in the CFG, set the tracked dataflow values to Unknown.
  if (objCNoRet.isImplicitNoReturn(ME)) {
    vals.setAllScratchValues(Unknown);
  }
}

//------------------------------------------------------------------------====//
// High-level "driver" logic for uninitialized values analysis.
//====------------------------------------------------------------------------//

static bool runOnBlock(const CFGBlock *block, const CFG &cfg,
                       AnalysisDeclContext &ac, CFGBlockValues &vals,
                       const ClassifyRefs &classification,
                       llvm::BitVector &wasAnalyzed,
                       UninitVariablesHandler &handler) {
  wasAnalyzed[block->getBlockID()] = true;
  vals.resetScratch();
  // Merge in values of predecessor blocks.
  bool isFirst = true;
  for (CFGBlock::const_pred_iterator I = block->pred_begin(),
       E = block->pred_end(); I != E; ++I) {
    const CFGBlock *pred = *I;
    if (!pred)
      continue;
    if (wasAnalyzed[pred->getBlockID()]) {
      vals.mergeIntoScratch(vals.getValueVector(pred), isFirst);
      isFirst = false;
    }
  }
  // Apply the transfer function.
  TransferFunctions tf(vals, cfg, block, ac, classification, handler);
  for (const auto &I : *block) {
    if (std::optional<CFGStmt> cs = I.getAs<CFGStmt>())
      tf.Visit(const_cast<Stmt *>(cs->getStmt()));
  }
  CFGTerminator terminator = block->getTerminator();
  if (auto *as = dyn_cast_or_null<GCCAsmStmt>(terminator.getStmt()))
    if (as->isAsmGoto())
      tf.Visit(as);
  return vals.updateValueVectorWithScratch(block);
}

namespace {

/// PruneBlocksHandler is a special UninitVariablesHandler that is used
/// to detect when a CFGBlock has any *potential* use of an uninitialized
/// variable. It is mainly used to prune out work during the final
/// reporting pass.
struct PruneBlocksHandler : public UninitVariablesHandler {
  /// Records if a CFGBlock had a potential use of an uninitialized variable.
  llvm::BitVector hadUse;

  /// Records if any CFGBlock had a potential use of an uninitialized variable.
  bool hadAnyUse = false;

  /// The current block to scribble use information.
  unsigned currentBlock = 0;

  PruneBlocksHandler(unsigned numBlocks) : hadUse(numBlocks, false) {}

  ~PruneBlocksHandler() override = default;

  void handleUseOfUninitVariable(const VarDecl *vd,
                                 const UninitUse &use) override {
    hadUse[currentBlock] = true;
    hadAnyUse = true;
  }

  /// Called when the uninitialized variable analysis detects the
  /// idiom 'int x = x'. All other uses of 'x' within the initializer
  /// are handled by handleUseOfUninitVariable.
  void handleSelfInit(const VarDecl *vd) override {
    hadUse[currentBlock] = true;
    hadAnyUse = true;
  }
};

} // namespace

void clang::runUninitializedVariablesAnalysis(
    const DeclContext &dc,
    const CFG &cfg,
    AnalysisDeclContext &ac,
    UninitVariablesHandler &handler,
    UninitVariablesAnalysisStats &stats) {
  CFGBlockValues vals(cfg);
  vals.computeSetOfDeclarations(dc);
  if (vals.hasNoDeclarations())
    return;

  stats.NumVariablesAnalyzed = vals.getNumEntries();

  // Precompute which expressions are uses and which are initializations.
  ClassifyRefs classification(ac);
  cfg.VisitBlockStmts(classification);

  // Mark all variables uninitialized at the entry.
  const CFGBlock &entry = cfg.getEntry();
  ValueVector &vec = vals.getValueVector(&entry);
  const unsigned n = vals.getNumEntries();
  for (unsigned j = 0; j < n; ++j) {
    vec[j] = Uninitialized;
  }
  // Proceed with the worklist.
  ForwardDataflowWorklist worklist(cfg, ac);
  llvm::BitVector previouslyVisited(cfg.getNumBlockIDs());
  worklist.enqueueSuccessors(&cfg.getEntry());
  llvm::BitVector wasAnalyzed(cfg.getNumBlockIDs(), false);
  wasAnalyzed[cfg.getEntry().getBlockID()] = true;
  PruneBlocksHandler PBH(cfg.getNumBlockIDs());

  while (const CFGBlock *block = worklist.dequeue()) {
    PBH.currentBlock = block->getBlockID();

    // Did the block change?
    bool changed = runOnBlock(block, cfg, ac, vals,
                              classification, wasAnalyzed, PBH);
    ++stats.NumBlockVisits;
    if (changed || !previouslyVisited[block->getBlockID()])
      worklist.enqueueSuccessors(block);
    previouslyVisited[block->getBlockID()] = true;
  }

  if (!PBH.hadAnyUse)
    return;

  // Run through the blocks one more time, and report uninitialized variables.
  for (const auto *block : cfg)
    if (PBH.hadUse[block->getBlockID()]) {
      runOnBlock(block, cfg, ac, vals, classification, wasAnalyzed, handler);
      ++stats.NumBlockVisits;
    }
}

UninitVariablesHandler::~UninitVariablesHandler() = default;