//===-- Transfer.cpp --------------------------------------------*- C++ -*-===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This file defines transfer functions that evaluate program statements and
// update an environment accordingly.
//
//===----------------------------------------------------------------------===//

#include "clang/Analysis/FlowSensitive/Transfer.h"
#include "clang/AST/Decl.h"
#include "clang/AST/DeclBase.h"
#include "clang/AST/DeclCXX.h"
#include "clang/AST/Expr.h"
#include "clang/AST/ExprCXX.h"
#include "clang/AST/OperationKinds.h"
#include "clang/AST/Stmt.h"
#include "clang/AST/StmtVisitor.h"
#include "clang/AST/Type.h"
#include "clang/Analysis/FlowSensitive/ASTOps.h"
#include "clang/Analysis/FlowSensitive/AdornedCFG.h"
#include "clang/Analysis/FlowSensitive/DataflowAnalysisContext.h"
#include "clang/Analysis/FlowSensitive/DataflowEnvironment.h"
#include "clang/Analysis/FlowSensitive/NoopAnalysis.h"
#include "clang/Analysis/FlowSensitive/RecordOps.h"
#include "clang/Analysis/FlowSensitive/StorageLocation.h"
#include "clang/Analysis/FlowSensitive/Value.h"
#include "clang/Basic/Builtins.h"
#include "clang/Basic/LLVM.h"
#include "clang/Basic/OperatorKinds.h"
#include "llvm/Support/Casting.h"
#include <cassert>

#define DEBUG_TYPE "dataflow"

namespace clang {
namespace dataflow {

const Environment *StmtToEnvMap::getEnvironment(const Stmt &S) const {
  const CFGBlock *Block = ACFG.blockForStmt(S);
  if (Block == nullptr) {
    assert(false);
    return nullptr;
  }
  if (!ACFG.isBlockReachable(*Block))
    return nullptr;
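  // The state stored in `BlockToState` for the block currently being
  // processed is not up to date yet, so use the in-progress state instead.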
  if (Block->getBlockID() == CurBlockID)
    return &CurState.Env;
  const auto &State = BlockToState[Block->getBlockID()];
  if (!State)
    return nullptr;
  return &State->Env;
}

static BoolValue &evaluateBooleanEquality(const Expr &LHS, const Expr &RHS,
                                          Environment &Env) {
  Value *LHSValue = Env.getValue(LHS);
  Value *RHSValue = Env.getValue(RHS);

  // When two unsupported values are compared, both are nullptr. Only supported
  // values should evaluate to equal.
  if (LHSValue == RHSValue && LHSValue)
    return Env.getBoolLiteralValue(true);

  // Special case: `NullPtrLiteralExpr == itself`. When both sides are untyped
  // nullptr, they do not have an assigned Value, but they compare equal.
  if (LHS.getType()->isNullPtrType() && RHS.getType()->isNullPtrType())
    return Env.getBoolLiteralValue(true);

  if (auto *LHSBool = dyn_cast_or_null<BoolValue>(LHSValue))
    if (auto *RHSBool = dyn_cast_or_null<BoolValue>(RHSValue))
      return Env.makeIff(*LHSBool, *RHSBool);

  if (auto *LHSPtr = dyn_cast_or_null<PointerValue>(LHSValue))
    if (auto *RHSPtr = dyn_cast_or_null<PointerValue>(RHSValue))
      // If the storage locations are the same, the pointers definitely compare
      // the same. If the storage locations are different, they may still
      // alias, so we fall through to the case below that returns an atom.
      if (&LHSPtr->getPointeeLoc() == &RHSPtr->getPointeeLoc())
        return Env.getBoolLiteralValue(true);

  return Env.makeAtomicBoolValue();
}

static BoolValue &unpackValue(BoolValue &V, Environment &Env) {
  if (auto *Top = llvm::dyn_cast<TopBoolValue>(&V)) {
    auto &A = Env.getDataflowAnalysisContext().arena();
    return A.makeBoolValue(A.makeAtomRef(Top->getAtom()));
  }
  return V;
}

// Unpacks the value (if any) associated with `E` and updates `E` to the new
// value, if any unpacking occurred. Also, does the lvalue-to-rvalue conversion,
// by skipping past the reference.
static Value *maybeUnpackLValueExpr(const Expr &E, Environment &Env) {
  auto *Loc = Env.getStorageLocation(E);
  if (Loc == nullptr)
    return nullptr;
  auto *Val = Env.getValue(*Loc);

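  // Only boolean values can contain a `TopBoolValue` that needs unpacking;
  // any other kind of value is returned unchanged.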
  auto *B = dyn_cast_or_null<BoolValue>(Val);
  if (B == nullptr)
    return Val;

  auto &UnpackedVal = unpackValue(*B, Env);
  if (&UnpackedVal == Val)
    return Val;
  Env.setValue(*Loc, UnpackedVal);
  return &UnpackedVal;
}

static void propagateValue(const Expr &From, const Expr &To, Environment &Env) {
  if (From.getType()->isRecordType())
    return;
  if (auto *Val = Env.getValue(From))
    Env.setValue(To, *Val);
}

static void propagateStorageLocation(const Expr &From, const Expr &To,
                                     Environment &Env) {
  if (auto *Loc = Env.getStorageLocation(From))
    Env.setStorageLocation(To, *Loc);
}

// Propagates the value or storage location of `From` to `To` in cases where
// `From` may be either a glvalue or a prvalue. `To` must be a glvalue iff
// `From` is a glvalue.
static void propagateValueOrStorageLocation(const Expr &From, const Expr &To,
                                            Environment &Env) {
  assert(From.isGLValue() == To.isGLValue());
  if (From.isGLValue())
    propagateStorageLocation(From, To, Env);
  else
    propagateValue(From, To, Env);
}

namespace {

class TransferVisitor : public ConstStmtVisitor<TransferVisitor> {
public:
  TransferVisitor(const StmtToEnvMap &StmtToEnv, Environment &Env,
                  Environment::ValueModel &Model)
      : StmtToEnv(StmtToEnv), Env(Env), Model(Model) {}

  void VisitBinaryOperator(const BinaryOperator *S) {
    const Expr *LHS = S->getLHS();
    assert(LHS != nullptr);

    const Expr *RHS = S->getRHS();
    assert(RHS != nullptr);

    // Do compound assignments up-front, as there are so many of them and we
    // don't want to list all of them in the switch statement below.
    // To avoid generating unnecessary values, we don't create a new value but
    // instead leave it to the specific analysis to do this if desired.
    if (S->isCompoundAssignmentOp())
      propagateStorageLocation(*S->getLHS(), *S, Env);

    switch (S->getOpcode()) {
    case BO_Assign: {
      auto *LHSLoc = Env.getStorageLocation(*LHS);
      if (LHSLoc == nullptr)
        break;

      auto *RHSVal = Env.getValue(*RHS);
      if (RHSVal == nullptr)
        RHSVal = Env.createValue(LHS->getType());
      if (RHSVal == nullptr)
        break;

      // Assign a value to the storage location of the left-hand side.
      Env.setValue(*LHSLoc, *RHSVal);

      // Assign a storage location for the whole expression.
      Env.setStorageLocation(*S, *LHSLoc);
      break;
    }
    case BO_LAnd:
    case BO_LOr: {
      BoolValue &LHSVal = getLogicOperatorSubExprValue(*LHS);
      BoolValue &RHSVal = getLogicOperatorSubExprValue(*RHS);

      if (S->getOpcode() == BO_LAnd)
        Env.setValue(*S, Env.makeAnd(LHSVal, RHSVal));
      else
        Env.setValue(*S, Env.makeOr(LHSVal, RHSVal));
      break;
    }
    case BO_NE:
    case BO_EQ: {
      auto &LHSEqRHSValue = evaluateBooleanEquality(*LHS, *RHS, Env);
      Env.setValue(*S, S->getOpcode() == BO_EQ ? LHSEqRHSValue
                                               : Env.makeNot(LHSEqRHSValue));
      break;
    }
    case BO_Comma: {
      propagateValueOrStorageLocation(*RHS, *S, Env);
      break;
    }
    default:
      break;
    }
  }

  void VisitDeclRefExpr(const DeclRefExpr *S) {
    const ValueDecl *VD = S->getDecl();
    assert(VD != nullptr);

    // Some `DeclRefExpr`s aren't glvalues, so we can't associate them with a
    // `StorageLocation`, and there's also no sensible `Value` that we can
    // assign to them. Examples:
    // - Non-static member variables
    // - Non-static member functions
    // Note: Member operators are an exception to this, but apparently only
    // if the `DeclRefExpr` is used within the callee of a
    // `CXXOperatorCallExpr`. In other cases, for example when applying the
    // address-of operator, the `DeclRefExpr` is a prvalue.
    if (!S->isGLValue())
      return;

    auto *DeclLoc = Env.getStorageLocation(*VD);
    if (DeclLoc == nullptr)
      return;

    Env.setStorageLocation(*S, *DeclLoc);
  }

  void VisitDeclStmt(const DeclStmt *S) {
    // Group decls are converted into single decls in the CFG so the cast below
    // is safe.
    const auto &D = *cast<VarDecl>(S->getSingleDecl());

    ProcessVarDecl(D);
  }

  void ProcessVarDecl(const VarDecl &D) {
    // Static local vars are already initialized in `Environment`.
    if (D.hasGlobalStorage())
      return;

    // If this is the holding variable for a `BindingDecl`, we may already
    // have a storage location set up -- so check. (See also explanation below
    // where we process the `BindingDecl`.)
    if (D.getType()->isReferenceType() && Env.getStorageLocation(D) != nullptr)
      return;

    assert(Env.getStorageLocation(D) == nullptr);

    Env.setStorageLocation(D, Env.createObject(D));

    // `DecompositionDecl` must be handled after we've interpreted the loc
    // itself, because the binding expression refers back to the
    // `DecompositionDecl` (even though it has no written name).
    if (const auto *Decomp = dyn_cast<DecompositionDecl>(&D)) {
      // If VarDecl is a DecompositionDecl, evaluate each of its bindings. This
      // needs to be evaluated after initializing the values in the storage for
      // VarDecl, as the bindings refer to them.
      // FIXME: Add support for ArraySubscriptExpr.
      // FIXME: Consider adding AST nodes used in BindingDecls to the CFG.
      for (const auto *B : Decomp->bindings()) {
        if (auto *ME = dyn_cast_or_null<MemberExpr>(B->getBinding())) {
          auto *DE = dyn_cast_or_null<DeclRefExpr>(ME->getBase());
          if (DE == nullptr)
            continue;

          // ME and its base haven't been visited because they aren't included
          // in the statements of the CFG basic block.
          VisitDeclRefExpr(DE);
          VisitMemberExpr(ME);

          if (auto *Loc = Env.getStorageLocation(*ME))
            Env.setStorageLocation(*B, *Loc);
        } else if (auto *VD = B->getHoldingVar()) {
          // Holding vars are used to back the `BindingDecl`s of tuple-like
          // types. The holding var declarations appear after the
          // `DecompositionDecl`, so we have to explicitly process them here
          // to know their storage location. They will be processed a second
          // time when we visit their `VarDecl`s, so we have code that protects
          // against this above.
          ProcessVarDecl(*VD);
          auto *VDLoc = Env.getStorageLocation(*VD);
          assert(VDLoc != nullptr);
          Env.setStorageLocation(*B, *VDLoc);
        }
      }
    }
  }

  void VisitCastExpr(const CastExpr *S) {
    const Expr *SubExpr = S->getSubExpr();
    assert(SubExpr != nullptr);

    switch (S->getCastKind()) {
    case CK_IntegralToBoolean: {
      // This cast creates a new, boolean value from the integral value. We
      // model that with a fresh value in the environment, unless it's already
      // a boolean.
      if (auto *SubExprVal =
              dyn_cast_or_null<BoolValue>(Env.getValue(*SubExpr)))
        Env.setValue(*S, *SubExprVal);
      else
        // FIXME: If integer modeling is added, then update this code to create
        // the boolean based on the integer model.
        Env.setValue(*S, Env.makeAtomicBoolValue());
      break;
    }

    case CK_LValueToRValue: {
      // When an L-value is used as an R-value, it may result in sharing, so we
      // need to unpack any nested `Top`s.
      auto *SubExprVal = maybeUnpackLValueExpr(*SubExpr, Env);
      if (SubExprVal == nullptr)
        break;

      Env.setValue(*S, *SubExprVal);
      break;
    }

    case CK_BaseToDerived: {
      // This is a cast of a (single-layer) pointer or reference to a record
      // type. We should now model the fields for the derived type.

      // Get the RecordStorageLocation for the record object underneath.
      RecordStorageLocation *Loc = nullptr;
      if (S->getType()->isPointerType()) {
        auto *PV = Env.get<PointerValue>(*SubExpr);
        assert(PV != nullptr);
        if (PV == nullptr)
          break;
        Loc = cast<RecordStorageLocation>(&PV->getPointeeLoc());
      } else {
        assert(S->getType()->isRecordType());
        if (SubExpr->isGLValue()) {
          Loc = Env.get<RecordStorageLocation>(*SubExpr);
        } else {
          Loc = &Env.getResultObjectLocation(*SubExpr);
        }
      }
      if (!Loc) {
        // Nowhere to add children or propagate from, so we're done.
        break;
      }

      // Get the derived record type underneath the reference or pointer.
      QualType Derived = S->getType().getNonReferenceType();
      if (Derived->isPointerType()) {
        Derived = Derived->getPointeeType();
      }

      // Add children to the storage location for fields (including synthetic
      // fields) of the derived type and initialize their values.
      for (const FieldDecl *Field :
           Env.getDataflowAnalysisContext().getModeledFields(Derived)) {
        assert(Field != nullptr);
        QualType FieldType = Field->getType();
        if (FieldType->isReferenceType()) {
          Loc->addChild(*Field, nullptr);
        } else {
          Loc->addChild(*Field, &Env.createStorageLocation(FieldType));
        }
      }

      for (const auto &Entry :
           Env.getDataflowAnalysisContext().getSyntheticFields(Derived)) {
        Loc->addSyntheticField(Entry.getKey(),
                               Env.createStorageLocation(Entry.getValue()));
      }

      Env.initializeFieldsWithValues(*Loc, Derived);

      // Fall through to propagate SubExpr's StorageLocation to the CastExpr.
      [[fallthrough]];
    }
    case CK_IntegralCast:
      // FIXME: This cast creates a new integral value from the
      // subexpression. But, because we don't model integers, we don't
      // distinguish between this new value and the underlying one. If integer
      // modeling is added, then update this code to create a fresh location
      // and value.
    case CK_UncheckedDerivedToBase:
    case CK_DerivedToBase:
    case CK_ConstructorConversion:
    case CK_UserDefinedConversion:
    case CK_NoOp: {
      // FIXME: Consider making `Environment::getStorageLocation` skip noop
      // expressions (this and other similar expressions in the file) instead
      // of assigning them storage locations.
      propagateValueOrStorageLocation(*SubExpr, *S, Env);
      break;
    }
    case CK_NullToPointer: {
      auto &NullPointerVal =
          Env.getOrCreateNullPointerValue(S->getType()->getPointeeType());
      Env.setValue(*S, NullPointerVal);
      break;
    }
    case CK_NullToMemberPointer:
      // FIXME: Implement pointers to members. For now, don't associate a value
      // with this expression.
      break;
    case CK_FunctionToPointerDecay: {
      StorageLocation *PointeeLoc = Env.getStorageLocation(*SubExpr);
      if (PointeeLoc == nullptr)
        break;

      Env.setValue(*S, Env.create<PointerValue>(*PointeeLoc));
      break;
    }
    case CK_BuiltinFnToFnPtr:
      // Despite its name, the result type of `BuiltinFnToFnPtr` is a function,
      // not a function pointer. In addition, builtin functions can only be
      // called directly; it is not legal to take their address. We therefore
      // don't need to create a value or storage location for them.
      break;
    default:
      break;
    }
  }

  void VisitUnaryOperator(const UnaryOperator *S) {
    const Expr *SubExpr = S->getSubExpr();
    assert(SubExpr != nullptr);

    switch (S->getOpcode()) {
    case UO_Deref: {
      const auto *SubExprVal = Env.get<PointerValue>(*SubExpr);
      if (SubExprVal == nullptr)
        break;

      Env.setStorageLocation(*S, SubExprVal->getPointeeLoc());
      break;
    }
    case UO_AddrOf: {
      // FIXME: Model pointers to members.
      if (S->getType()->isMemberPointerType())
        break;

      if (StorageLocation *PointeeLoc = Env.getStorageLocation(*SubExpr))
        Env.setValue(*S, Env.create<PointerValue>(*PointeeLoc));
      break;
    }
    case UO_LNot: {
      auto *SubExprVal = dyn_cast_or_null<BoolValue>(Env.getValue(*SubExpr));
      if (SubExprVal == nullptr)
        break;

      Env.setValue(*S, Env.makeNot(*SubExprVal));
      break;
    }
    case UO_PreInc:
    case UO_PreDec:
      // Propagate the storage location and clear out any value associated with
      // it (to represent the fact that the value has definitely changed).
      // To avoid generating unnecessary values, we leave it to the specific
      // analysis to create a new value if desired.
      propagateStorageLocation(*S->getSubExpr(), *S, Env);
      if (StorageLocation *Loc = Env.getStorageLocation(*S->getSubExpr()))
        Env.clearValue(*Loc);
      break;
    case UO_PostInc:
    case UO_PostDec:
      // Propagate the old value, then clear out any value associated with the
      // storage location (to represent the fact that the value has definitely
      // changed). See above for rationale.
      propagateValue(*S->getSubExpr(), *S, Env);
      if (StorageLocation *Loc = Env.getStorageLocation(*S->getSubExpr()))
        Env.clearValue(*Loc);
      break;
    default:
      break;
    }
  }

  void VisitCXXThisExpr(const CXXThisExpr *S) {
    auto *ThisPointeeLoc = Env.getThisPointeeStorageLocation();
    if (ThisPointeeLoc == nullptr)
      // Unions are not supported yet, and will not have a location for the
      // `this` expression's pointee.
      return;

    Env.setValue(*S, Env.create<PointerValue>(*ThisPointeeLoc));
  }

  void VisitCXXNewExpr(const CXXNewExpr *S) {
    if (Value *Val = Env.createValue(S->getType()))
      Env.setValue(*S, *Val);
  }

  void VisitCXXDeleteExpr(const CXXDeleteExpr *S) {
    // Empty method.
    // We consciously don't do anything on deletes. Diagnosing double deletes
    // (for example) should be done by a specific analysis, not by the
    // framework.
  }

  void VisitReturnStmt(const ReturnStmt *S) {
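    // Return values are tracked only for the benefit of context-sensitive
    // (inlined) analysis of callees; when the callee's environment is popped,
    // they are mapped back to the call expression in the caller.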
    if (!Env.getDataflowAnalysisContext().getOptions().ContextSensitiveOpts)
      return;

    auto *Ret = S->getRetValue();
    if (Ret == nullptr)
      return;

    if (Ret->isPRValue()) {
      if (Ret->getType()->isRecordType())
        return;

      auto *Val = Env.getValue(*Ret);
      if (Val == nullptr)
        return;

      // FIXME: Model NRVO.
      Env.setReturnValue(Val);
    } else {
      auto *Loc = Env.getStorageLocation(*Ret);
      if (Loc == nullptr)
        return;

      // FIXME: Model NRVO.
      Env.setReturnStorageLocation(Loc);
    }
  }

  void VisitMemberExpr(const MemberExpr *S) {
    ValueDecl *Member = S->getMemberDecl();
    assert(Member != nullptr);

    // FIXME: Consider assigning pointer values to function member expressions.
    if (Member->isFunctionOrFunctionTemplate())
      return;

    // FIXME: if/when we add support for modeling enums, use that support here.
    if (isa<EnumConstantDecl>(Member))
      return;

    if (auto *D = dyn_cast<VarDecl>(Member)) {
      if (D->hasGlobalStorage()) {
        auto *VarDeclLoc = Env.getStorageLocation(*D);
        if (VarDeclLoc == nullptr)
          return;

        Env.setStorageLocation(*S, *VarDeclLoc);
        return;
      }
    }

    RecordStorageLocation *BaseLoc = getBaseObjectLocation(*S, Env);
    if (BaseLoc == nullptr)
      return;

    auto *MemberLoc = BaseLoc->getChild(*Member);
    if (MemberLoc == nullptr)
      return;
    Env.setStorageLocation(*S, *MemberLoc);
  }

  void VisitCXXDefaultArgExpr(const CXXDefaultArgExpr *S) {
    const Expr *ArgExpr = S->getExpr();
    assert(ArgExpr != nullptr);
    propagateValueOrStorageLocation(*ArgExpr, *S, Env);

    if (S->isPRValue() && S->getType()->isRecordType()) {
      auto &Loc = Env.getResultObjectLocation(*S);
      Env.initializeFieldsWithValues(Loc);
    }
  }

  void VisitCXXDefaultInitExpr(const CXXDefaultInitExpr *S) {
    const Expr *InitExpr = S->getExpr();
    assert(InitExpr != nullptr);

    // If this is a prvalue of record type, the handler for `*InitExpr` (if one
    // exists) will initialize the result object; there is no value to
    // propagate here.
    if (S->getType()->isRecordType() && S->isPRValue())
      return;

    propagateValueOrStorageLocation(*InitExpr, *S, Env);
  }

  void VisitCXXConstructExpr(const CXXConstructExpr *S) {
    const CXXConstructorDecl *ConstructorDecl = S->getConstructor();
    assert(ConstructorDecl != nullptr);

    // `CXXConstructExpr` can have array type if default-initializing an array
    // of records. We don't handle this specifically beyond potentially
    // inlining the call.
    if (!S->getType()->isRecordType()) {
      transferInlineCall(S, ConstructorDecl);
      return;
    }

    RecordStorageLocation &Loc = Env.getResultObjectLocation(*S);

    if (ConstructorDecl->isCopyOrMoveConstructor()) {
      // It is permissible for a copy/move constructor to have additional
      // parameters as long as they have default arguments defined for them.
      assert(S->getNumArgs() != 0);

      const Expr *Arg = S->getArg(0);
      assert(Arg != nullptr);

      auto *ArgLoc = Env.get<RecordStorageLocation>(*Arg);
      if (ArgLoc == nullptr)
        return;

      // Even if the copy/move constructor call is elidable, we choose to copy
      // the record in all cases (which isn't wrong, just potentially not
      // optimal).
      //
      // To handle cases of base class initializers in constructors, where a
      // sibling derived class can be used to initialize a shared-base-class
      // subobject through a DerivedToBase cast, intentionally copy only the
      // parts of `ArgLoc` that are part of the base class being initialized.
      // This is necessary because the type of `Loc` in these cases is the
      // derived type ultimately being constructed, not the type of the base
      // class subobject.
      copyRecord(*ArgLoc, Loc, Env, S->getType());
      return;
    }

    Env.initializeFieldsWithValues(Loc, S->getType());

    transferInlineCall(S, ConstructorDecl);
  }

  void VisitCXXOperatorCallExpr(const CXXOperatorCallExpr *S) {
    if (S->getOperator() == OO_Equal) {
      assert(S->getNumArgs() == 2);

      const Expr *Arg0 = S->getArg(0);
      assert(Arg0 != nullptr);

      const Expr *Arg1 = S->getArg(1);
      assert(Arg1 != nullptr);

      // Evaluate only copy and move assignment operators.
      const auto *Method =
          dyn_cast_or_null<CXXMethodDecl>(S->getDirectCallee());
      if (!Method)
        return;
      if (!Method->isCopyAssignmentOperator() &&
          !Method->isMoveAssignmentOperator())
        return;

      RecordStorageLocation *LocSrc = nullptr;
      if (Arg1->isPRValue()) {
        LocSrc = &Env.getResultObjectLocation(*Arg1);
      } else {
        LocSrc = Env.get<RecordStorageLocation>(*Arg1);
      }
      auto *LocDst = Env.get<RecordStorageLocation>(*Arg0);

      if (LocSrc == nullptr || LocDst == nullptr)
        return;

      // If the destination object here is of a derived class, `Arg0` may be a
      // cast of that object to a base class, and the source object may be of
      // a sibling derived class. To handle these cases, ensure we are copying
      // only the fields for `Arg0`'s type, not the type of the underlying
      // `RecordStorageLocation`.
      copyRecord(*LocSrc, *LocDst, Env, Arg0->getType());

      // The assignment operator can have an arbitrary return type. We model
      // the return value only if the return type is the same as or a base
      // class of the destination type.
      if (S->getType().getCanonicalType().getUnqualifiedType() !=
          LocDst->getType().getCanonicalType().getUnqualifiedType()) {
        auto ReturnDecl = S->getType()->getAsCXXRecordDecl();
        auto DstDecl = LocDst->getType()->getAsCXXRecordDecl();
        if (ReturnDecl == nullptr || DstDecl == nullptr)
          return;
        if (!DstDecl->isDerivedFrom(ReturnDecl))
          return;
      }

      if (S->isGLValue())
        Env.setStorageLocation(*S, *LocDst);
      else
        copyRecord(*LocDst, Env.getResultObjectLocation(*S), Env);

      return;
    }

    // `CXXOperatorCallExpr` can be a prvalue. Call `VisitCallExpr()` to
    // initialize the prvalue's fields with values.
    VisitCallExpr(S);
  }

  void VisitCXXRewrittenBinaryOperator(const CXXRewrittenBinaryOperator *RBO) {
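    // Propagate the value of the semantic form, i.e. the expression that the
    // rewritten operator desugars to (e.g. a comparison built from
    // `operator<=>`).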
    propagateValue(*RBO->getSemanticForm(), *RBO, Env);
  }

  void VisitCallExpr(const CallExpr *S) {
    // Of clang's builtins, only `__builtin_expect` is handled explicitly,
    // since others (like trap, debugtrap, and unreachable) are handled by CFG
    // construction.
    if (S->isCallToStdMove()) {
      assert(S->getNumArgs() == 1);

      const Expr *Arg = S->getArg(0);
      assert(Arg != nullptr);

      auto *ArgLoc = Env.getStorageLocation(*Arg);
      if (ArgLoc == nullptr)
        return;

      Env.setStorageLocation(*S, *ArgLoc);
    } else if (S->getDirectCallee() != nullptr &&
               S->getDirectCallee()->getBuiltinID() ==
                   Builtin::BI__builtin_expect) {
      assert(S->getNumArgs() > 0);
      assert(S->getArg(0) != nullptr);
      auto *ArgVal = Env.getValue(*S->getArg(0));
      if (ArgVal == nullptr)
        return;
      Env.setValue(*S, *ArgVal);
    } else if (const FunctionDecl *F = S->getDirectCallee()) {
      transferInlineCall(S, F);

      // If this call produces a prvalue of record type, initialize its fields
      // with values.
      if (S->getType()->isRecordType() && S->isPRValue()) {
        RecordStorageLocation &Loc = Env.getResultObjectLocation(*S);
        Env.initializeFieldsWithValues(Loc);
      }
    }
  }

  void VisitMaterializeTemporaryExpr(const MaterializeTemporaryExpr *S) {
    const Expr *SubExpr = S->getSubExpr();
    assert(SubExpr != nullptr);

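    // A materialized temporary is a glvalue, so it gets its own storage
    // location, separate from the prvalue subexpression that initializes it.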
    StorageLocation &Loc = Env.createStorageLocation(*S);
    Env.setStorageLocation(*S, Loc);

    if (SubExpr->getType()->isRecordType())
      // Nothing else left to do -- we initialized the record when transferring
      // `SubExpr`.
      return;

    if (Value *SubExprVal = Env.getValue(*SubExpr))
      Env.setValue(Loc, *SubExprVal);
  }

  void VisitCXXBindTemporaryExpr(const CXXBindTemporaryExpr *S) {
    const Expr *SubExpr = S->getSubExpr();
    assert(SubExpr != nullptr);

    propagateValue(*SubExpr, *S, Env);
  }

  void VisitConditionalOperator(const ConditionalOperator *S) {
    const Environment *TrueEnv = StmtToEnv.getEnvironment(*S->getTrueExpr());
    const Environment *FalseEnv = StmtToEnv.getEnvironment(*S->getFalseExpr());

    if (TrueEnv == nullptr || FalseEnv == nullptr) {
      // If the true or false branch is dead, we may not have an environment
      // for it. We could handle this specifically by forwarding the value or
      // location of the live branch, but this case is rare enough that this
      // probably isn't worth the additional complexity.
      return;
    }

    if (S->isGLValue()) {
      StorageLocation *TrueLoc = TrueEnv->getStorageLocation(*S->getTrueExpr());
      StorageLocation *FalseLoc =
          FalseEnv->getStorageLocation(*S->getFalseExpr());
      if (TrueLoc == FalseLoc && TrueLoc != nullptr) {
        Env.setStorageLocation(*S, *TrueLoc);
      } else if (!S->getType()->isRecordType()) {
        // Ideally, we would have something like an "alias set" to say that
        // the result StorageLocation can be either of the locations from the
        // TrueEnv or FalseEnv. Then, when this ConditionalOperator is
        // (a) used in an LValueToRValue cast, the value is the join of all of
        //     the values in the alias set.
        // (b) or, used in an assignment to the resulting LValue, the
        //     assignment *may* update all of the locations in the alias set.
        // For now, we do the simpler thing of creating a new StorageLocation
        // and joining the values right away, handling only case (a).
        // Otherwise, the dataflow framework needs to be updated to be able to
        // represent alias sets and weak updates (for the "may").
        if (Value *Val = Environment::joinValues(
                S->getType(), TrueEnv->getValue(*S->getTrueExpr()), *TrueEnv,
                FalseEnv->getValue(*S->getFalseExpr()), *FalseEnv, Env,
                Model)) {
          StorageLocation &Loc = Env.createStorageLocation(*S);
          Env.setStorageLocation(*S, Loc);
          Env.setValue(Loc, *Val);
        }
      }
    } else if (!S->getType()->isRecordType()) {
      // The conditional operator can evaluate to either of the values of the
      // two branches. To model this, join these two values together to yield
      // the result of the conditional operator.
      // Note: Most joins happen in `computeBlockInputState()`, but this case
      // is different:
      // - `computeBlockInputState()` (which in turn calls
      //   `Environment::join()`) joins values associated with the _same_
      //   expression or storage location, then associates the joined value
      //   with that expression or storage location. This join has nothing to
      //   do with transfer --
      //   instead, it joins together the results of performing transfer on
      //   two different blocks.
      // - Here, we join values associated with _different_ expressions (the
      //   true and false branch), then associate the joined value with a third
      //   expression (the conditional operator itself). This join is what it
      //   means to perform transfer on the conditional operator.
      if (Value *Val = Environment::joinValues(
              S->getType(), TrueEnv->getValue(*S->getTrueExpr()), *TrueEnv,
              FalseEnv->getValue(*S->getFalseExpr()), *FalseEnv, Env, Model))
        Env.setValue(*S, *Val);
    }
  }

  void VisitInitListExpr(const InitListExpr *S) {
    QualType Type = S->getType();

    if (!Type->isRecordType()) {
      // Until array initialization is implemented, we skip arrays and don't
      // need to care about cases where `getNumInits() > 1`.
      if (!Type->isArrayType() && S->getNumInits() == 1)
        propagateValueOrStorageLocation(*S->getInit(0), *S, Env);
      return;
    }

    // If the initializer list is transparent, there's nothing to do.
    if (S->isSemanticForm() && S->isTransparent())
      return;

    RecordStorageLocation &Loc = Env.getResultObjectLocation(*S);

    // Initialization of base classes and fields of record type happens when we
    // visit the nested `CXXConstructExpr` or `InitListExpr` for that base
    // class or field. We therefore only need to deal with fields of non-record
    // type here.

    RecordInitListHelper InitListHelper(S);

    for (auto [Field, Init] : InitListHelper.field_inits()) {
      if (Field->getType()->isRecordType())
        continue;
      if (Field->getType()->isReferenceType()) {
        assert(Field->getType().getCanonicalType()->getPointeeType() ==
               Init->getType().getCanonicalType());
        Loc.setChild(*Field, &Env.createObject(Field->getType(), Init));
        continue;
      }
      assert(Field->getType().getCanonicalType().getUnqualifiedType() ==
             Init->getType().getCanonicalType().getUnqualifiedType());
      StorageLocation *FieldLoc = Loc.getChild(*Field);
      // Locations for non-reference fields must always be non-null.
      assert(FieldLoc != nullptr);
      Value *Val = Env.getValue(*Init);
      if (Val == nullptr && isa<ImplicitValueInitExpr>(Init) &&
          Init->getType()->isPointerType())
        Val = &Env.getOrCreateNullPointerValue(
            Init->getType()->getPointeeType());
      if (Val == nullptr)
        Val = Env.createValue(Field->getType());
      if (Val != nullptr)
        Env.setValue(*FieldLoc, *Val);
    }

    for (const auto &[FieldName, FieldLoc] : Loc.synthetic_fields()) {
      QualType FieldType = FieldLoc->getType();
      if (FieldType->isRecordType()) {
        Env.initializeFieldsWithValues(*cast<RecordStorageLocation>(FieldLoc));
      } else {
        if (Value *Val = Env.createValue(FieldType))
          Env.setValue(*FieldLoc, *Val);
      }
    }

    // FIXME: Implement array initialization.
  }

  void VisitCXXBoolLiteralExpr(const CXXBoolLiteralExpr *S) {
    Env.setValue(*S, Env.getBoolLiteralValue(S->getValue()));
  }

  void VisitIntegerLiteral(const IntegerLiteral *S) {
    Env.setValue(*S, Env.getIntLiteralValue(S->getValue()));
  }

  void VisitParenExpr(const ParenExpr *S) {
    // The CFG does not contain `ParenExpr` as top-level statements in basic
    // blocks, however manual traversal to sub-expressions may encounter them.
    // Redirect to the sub-expression.
    auto *SubExpr = S->getSubExpr();
    assert(SubExpr != nullptr);
    Visit(SubExpr);
  }

  void VisitExprWithCleanups(const ExprWithCleanups *S) {
    // The CFG does not contain `ExprWithCleanups` as top-level statements in
    // basic blocks, however manual traversal to sub-expressions may encounter
    // them. Redirect to the sub-expression.
    auto *SubExpr = S->getSubExpr();
    assert(SubExpr != nullptr);
    Visit(SubExpr);
  }

private:
  /// Returns the value for the sub-expression `SubExpr` of a logic operator.
  BoolValue &getLogicOperatorSubExprValue(const Expr &SubExpr) {
    // `SubExpr` and its parent logic operator might be part of different basic
    // blocks. We try to access the value that is assigned to `SubExpr` in the
    // corresponding environment.
    if (const Environment *SubExprEnv = StmtToEnv.getEnvironment(SubExpr))
      if (auto *Val =
              dyn_cast_or_null<BoolValue>(SubExprEnv->getValue(SubExpr)))
        return *Val;

    // The sub-expression may lie within a basic block that isn't reachable,
    // even if we need it to evaluate the current (reachable) expression
    // (see https://discourse.llvm.org/t/70775). In this case, visit `SubExpr`
    // within the current environment and then try to get the value that gets
    // assigned to it.
    if (Env.getValue(SubExpr) == nullptr)
      Visit(&SubExpr);
    if (auto *Val = dyn_cast_or_null<BoolValue>(Env.getValue(SubExpr)))
      return *Val;

    // If the value of `SubExpr` is still unknown, we create a fresh symbolic
    // boolean value for it.
    return Env.makeAtomicBoolValue();
  }

  // If context sensitivity is enabled, try to analyze the body of the callee
  // `F` of `S`. The type `E` must be either `CallExpr` or `CXXConstructExpr`.
  template <typename E>
  void transferInlineCall(const E *S, const FunctionDecl *F) {
    const auto &Options = Env.getDataflowAnalysisContext().getOptions();
    if (!(Options.ContextSensitiveOpts &&
          Env.canDescend(Options.ContextSensitiveOpts->Depth, F)))
      return;

    const AdornedCFG *ACFG = Env.getDataflowAnalysisContext().getAdornedCFG(F);
    if (!ACFG)
      return;

    // FIXME: We don't support context-sensitive analysis of recursion, so
    // we should return early here if `F` is the same as the `FunctionDecl`
    // holding `S` itself.

    auto ExitBlock = ACFG->getCFG().getExit().getBlockID();

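    // `pushCall` creates the environment for analyzing the callee, mapping
    // the call's arguments to the callee's parameters.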
    auto CalleeEnv = Env.pushCall(S);

    // FIXME: Use the same analysis as the caller for the callee. Note,
    // though, that doing so would require support for changing the analysis's
    // ASTContext.
    auto Analysis = NoopAnalysis(ACFG->getDecl().getASTContext(),
                                 DataflowAnalysisOptions{Options});

    auto BlockToOutputState =
        dataflow::runDataflowAnalysis(*ACFG, Analysis, CalleeEnv);
    assert(BlockToOutputState);
    assert(ExitBlock < BlockToOutputState->size());

    auto &ExitState = (*BlockToOutputState)[ExitBlock];
    assert(ExitState);

    Env.popCall(S, ExitState->Env);
  }

  const StmtToEnvMap &StmtToEnv;
  Environment &Env;
  Environment::ValueModel &Model;
};

} // namespace

void transfer(const StmtToEnvMap &StmtToEnv, const Stmt &S, Environment &Env,
              Environment::ValueModel &Model) {
  TransferVisitor(StmtToEnv, Env, Model).Visit(&S);
}

} // namespace dataflow
} // namespace clang
