//===- ExprEngineCXX.cpp - ExprEngine support for C++ -----------*- C++ -*-===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This file defines the C++ expression evaluation engine.
//
//===----------------------------------------------------------------------===//

#include "clang/AST/ASTContext.h"
#include "clang/AST/AttrIterator.h"
#include "clang/AST/DeclCXX.h"
#include "clang/AST/ParentMap.h"
#include "clang/AST/StmtCXX.h"
#include "clang/Analysis/ConstructionContext.h"
#include "clang/Basic/PrettyStackTrace.h"
#include "clang/StaticAnalyzer/Core/CheckerManager.h"
#include "clang/StaticAnalyzer/Core/PathSensitive/AnalysisManager.h"
#include "clang/StaticAnalyzer/Core/PathSensitive/CallEvent.h"
#include "clang/StaticAnalyzer/Core/PathSensitive/ExprEngine.h"
#include "clang/StaticAnalyzer/Core/PathSensitive/SVals.h"
#include "llvm/ADT/STLExtras.h"
#include "llvm/ADT/Sequence.h"
#include "llvm/Support/Casting.h"
#include <optional>

using namespace clang;
using namespace ento;

void ExprEngine::CreateCXXTemporaryObject(const MaterializeTemporaryExpr *ME,
                                          ExplodedNode *Pred,
                                          ExplodedNodeSet &Dst) {
  StmtNodeBuilder Bldr(Pred, Dst, *currBldrCtx);
  const Expr *tempExpr = ME->getSubExpr()->IgnoreParens();
  ProgramStateRef state = Pred->getState();
  const LocationContext *LCtx = Pred->getLocationContext();

  state = createTemporaryRegionIfNeeded(state, LCtx, tempExpr, ME);
  Bldr.generateNode(ME, Pred, state);
}

// FIXME: This is the sort of code that should eventually live in a Core
// checker rather than as a special case in ExprEngine.
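// Model a call to a trivial copy/move constructor or a trivial assignment
// operator by loading the value of the source object and binding it directly
// to the destination (this) object, instead of inlining the trivial callee.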
void ExprEngine::performTrivialCopy(NodeBuilder &Bldr, ExplodedNode *Pred,
                                    const CallEvent &Call) {
  SVal ThisVal;
  bool AlwaysReturnsLValue;
  [[maybe_unused]] const CXXRecordDecl *ThisRD = nullptr;
  if (const CXXConstructorCall *Ctor = dyn_cast<CXXConstructorCall>(&Call)) {
    assert(Ctor->getDecl()->isTrivial());
    assert(Ctor->getDecl()->isCopyOrMoveConstructor());
    ThisVal = Ctor->getCXXThisVal();
    ThisRD = Ctor->getDecl()->getParent();
    AlwaysReturnsLValue = false;
  } else {
    assert(cast<CXXMethodDecl>(Call.getDecl())->isTrivial());
    assert(cast<CXXMethodDecl>(Call.getDecl())->getOverloadedOperator() ==
           OO_Equal);
    ThisVal = cast<CXXInstanceCall>(Call).getCXXThisVal();
    ThisRD = cast<CXXMethodDecl>(Call.getDecl())->getParent();
    AlwaysReturnsLValue = true;
  }

  const LocationContext *LCtx = Pred->getLocationContext();
  const Expr *CallExpr = Call.getOriginExpr();

  ExplodedNodeSet Dst;
  Bldr.takeNodes(Pred);

  assert(ThisRD);

  if (!ThisRD->isEmpty()) {
    SVal V = Call.getArgSVal(0);
    const Expr *VExpr = Call.getArgExpr(0);

    // If the value being copied is not unknown, load from its location to get
    // an aggregate rvalue.
    if (std::optional<Loc> L = V.getAs<Loc>())
      V = Pred->getState()->getSVal(*L);
    else
      assert(V.isUnknownOrUndef());

    ExplodedNodeSet Tmp;
    evalLocation(Tmp, CallExpr, VExpr, Pred, Pred->getState(), V,
                 /*isLoad=*/true);
    for (ExplodedNode *N : Tmp)
      evalBind(Dst, CallExpr, N, ThisVal, V, !AlwaysReturnsLValue);
  } else {
    // We can't copy empty classes because of empty base class optimization.
    // In that case, copying the empty base class subobject would overwrite the
    // object that it overlaps with - so let's not do that.
    // See issue-157467.cpp for an example.
    Dst.Add(Pred);
  }

  PostStmt PS(CallExpr, LCtx);
  for (ExplodedNode *N : Dst) {
    ProgramStateRef State = N->getState();
    if (AlwaysReturnsLValue)
      State = State->BindExpr(CallExpr, LCtx, ThisVal);
    else
      State = bindReturnValue(Call, LCtx, State);
    Bldr.generateNode(PS, State, N);
  }
}

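// Peel off all array types from Ty, leaving the innermost element type in Ty.
// If Ty was an array type, set IsArray and return the lvalue of the Idx-th
// element of the array; otherwise return LValue unchanged.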
SVal ExprEngine::makeElementRegion(ProgramStateRef State, SVal LValue,
                                   QualType &Ty, bool &IsArray, unsigned Idx) {
  SValBuilder &SVB = State->getStateManager().getSValBuilder();
  ASTContext &Ctx = SVB.getContext();

  if (const ArrayType *AT = Ctx.getAsArrayType(Ty)) {
    while (AT) {
      Ty = AT->getElementType();
      AT = dyn_cast<ArrayType>(AT->getElementType());
    }
    LValue = State->getLValue(Ty, SVB.makeArrayIndex(Idx), LValue);
    IsArray = true;
  }

  return LValue;
}

// When the prvalue is returned from the function (the construction context
// kind is SimpleReturnedValueKind or CXX17ElidedCopyReturnedValueKind), its
// materialization happens in the context of the caller.
// We pass BldrCtx explicitly, as currBldrCtx always refers to the callee's
// context.
SVal ExprEngine::computeObjectUnderConstruction(
    const Expr *E, ProgramStateRef State, const NodeBuilderContext *BldrCtx,
    const LocationContext *LCtx, const ConstructionContext *CC,
    EvalCallOptions &CallOpts, unsigned Idx) {

  SValBuilder &SVB = getSValBuilder();
  MemRegionManager &MRMgr = SVB.getRegionManager();
  ASTContext &ACtx = SVB.getContext();

  // Compute the target region by exploring the construction context.
  if (CC) {
    switch (CC->getKind()) {
    case ConstructionContext::CXX17ElidedCopyVariableKind:
    case ConstructionContext::SimpleVariableKind: {
      const auto *DSCC = cast<VariableConstructionContext>(CC);
      const auto *DS = DSCC->getDeclStmt();
      const auto *Var = cast<VarDecl>(DS->getSingleDecl());
      QualType Ty = Var->getType();
      return makeElementRegion(State, State->getLValue(Var, LCtx), Ty,
                               CallOpts.IsArrayCtorOrDtor, Idx);
    }
    case ConstructionContext::CXX17ElidedCopyConstructorInitializerKind:
    case ConstructionContext::SimpleConstructorInitializerKind: {
      const auto *ICC = cast<ConstructorInitializerConstructionContext>(CC);
      const auto *Init = ICC->getCXXCtorInitializer();
      const CXXMethodDecl *CurCtor = cast<CXXMethodDecl>(LCtx->getDecl());
      Loc ThisPtr = SVB.getCXXThis(CurCtor, LCtx->getStackFrame());
      SVal ThisVal = State->getSVal(ThisPtr);
      if (Init->isBaseInitializer()) {
        const auto *ThisReg = cast<SubRegion>(ThisVal.getAsRegion());
        const CXXRecordDecl *BaseClass =
            Init->getBaseClass()->getAsCXXRecordDecl();
        const auto *BaseReg =
            MRMgr.getCXXBaseObjectRegion(BaseClass, ThisReg,
                                         Init->isBaseVirtual());
        return SVB.makeLoc(BaseReg);
      }
      if (Init->isDelegatingInitializer())
        return ThisVal;

      const ValueDecl *Field;
      SVal FieldVal;
      if (Init->isIndirectMemberInitializer()) {
        Field = Init->getIndirectMember();
        FieldVal = State->getLValue(Init->getIndirectMember(), ThisVal);
      } else {
        Field = Init->getMember();
        FieldVal = State->getLValue(Init->getMember(), ThisVal);
      }

      QualType Ty = Field->getType();
      return makeElementRegion(State, FieldVal, Ty, CallOpts.IsArrayCtorOrDtor,
                               Idx);
    }
    case ConstructionContext::NewAllocatedObjectKind: {
      if (AMgr.getAnalyzerOptions().MayInlineCXXAllocator) {
        const auto *NECC = cast<NewAllocatedObjectConstructionContext>(CC);
        const auto *NE = NECC->getCXXNewExpr();
        SVal V = *getObjectUnderConstruction(State, NE, LCtx);
        if (const SubRegion *MR =
                dyn_cast_or_null<SubRegion>(V.getAsRegion())) {
          if (NE->isArray()) {
            CallOpts.IsArrayCtorOrDtor = true;

            auto Ty = NE->getType()->getPointeeType();
            while (const auto *AT = getContext().getAsArrayType(Ty))
              Ty = AT->getElementType();

            auto R = MRMgr.getElementRegion(Ty, svalBuilder.makeArrayIndex(Idx),
                                            MR, SVB.getContext());

            return loc::MemRegionVal(R);
          }
          return V;
        }
        // TODO: Detect when the allocator returns a null pointer.
        // Constructor shall not be called in this case.
      }
      break;
    }
    case ConstructionContext::SimpleReturnedValueKind:
    case ConstructionContext::CXX17ElidedCopyReturnedValueKind: {
      // The temporary is to be managed by the parent stack frame.
      // So build it in the parent stack frame if we're not in the
      // top frame of the analysis.
      const StackFrameContext *SFC = LCtx->getStackFrame();
      if (const LocationContext *CallerLCtx = SFC->getParent()) {
        auto RTC = (*SFC->getCallSiteBlock())[SFC->getIndex()]
                       .getAs<CFGCXXRecordTypedCall>();
        if (!RTC) {
          // We were unable to find the correct construction context for the
          // call in the parent stack frame. This is equivalent to not being
          // able to find construction context at all.
          break;
        }
        if (isa<BlockInvocationContext>(CallerLCtx)) {
          // Unwrap block invocation contexts. They're mostly part of
          // the current stack frame.
          CallerLCtx = CallerLCtx->getParent();
          assert(!isa<BlockInvocationContext>(CallerLCtx));
        }

        NodeBuilderContext CallerBldrCtx(getCoreEngine(),
                                         SFC->getCallSiteBlock(), CallerLCtx);
        return computeObjectUnderConstruction(
            cast<Expr>(SFC->getCallSite()), State, &CallerBldrCtx, CallerLCtx,
            RTC->getConstructionContext(), CallOpts);
      } else {
        // We are on the top frame of the analysis. We do not know where the
        // object is returned to. Conjure a symbolic region for the return
        // value.
        // TODO: We probably need a new MemRegion kind to represent the storage
        // of that SymbolicRegion, so that we could produce a fancy symbol
        // instead of an anonymous conjured symbol.
        // TODO: Do we need to track the region to avoid having it dead
        // too early? It does die too early, at least in C++17, but because
        // putting anything into a SymbolicRegion causes an immediate escape,
        // it doesn't cause any leak false positives.
        const auto *RCC = cast<ReturnedValueConstructionContext>(CC);
        // Make sure that this doesn't coincide with any other symbol
        // conjured for the returned expression.
        static const int TopLevelSymRegionTag = 0;
        const Expr *RetE = RCC->getReturnStmt()->getRetValue();
        assert(RetE && "Void returns should not have a construction context");
        QualType ReturnTy = RetE->getType();
        QualType RegionTy = ACtx.getPointerType(ReturnTy);
        return SVB.conjureSymbolVal(&TopLevelSymRegionTag, getCFGElementRef(),
                                    SFC, RegionTy, currBldrCtx->blockCount());
      }
      llvm_unreachable("Unhandled return value construction context!");
    }
    case ConstructionContext::ElidedTemporaryObjectKind: {
      assert(AMgr.getAnalyzerOptions().ShouldElideConstructors);
      const auto *TCC = cast<ElidedTemporaryObjectConstructionContext>(CC);

      // Support pre-C++17 copy elision. We'll have the elidable copy
      // constructor in the AST and in the CFG, but we'll skip it
      // and construct directly into the final object. This call
      // also sets the CallOpts flags for us.
      // If the elided copy/move constructor is not supported, there's still
      // benefit in trying to model the non-elided constructor.
      // Stash our state before trying to elide, as it'll get overwritten.
      ProgramStateRef PreElideState = State;
      EvalCallOptions PreElideCallOpts = CallOpts;

      SVal V = computeObjectUnderConstruction(
          TCC->getConstructorAfterElision(), State, BldrCtx, LCtx,
          TCC->getConstructionContextAfterElision(), CallOpts);

      // FIXME: This definition of "copy elision has not failed" is unreliable.
      // It doesn't indicate that the constructor will actually be inlined
      // later; this is still up to evalCall() to decide.
      if (!CallOpts.IsCtorOrDtorWithImproperlyModeledTargetRegion)
        return V;

      // Copy elision failed. Revert the changes and proceed as if we have
      // a simple temporary.
      CallOpts = PreElideCallOpts;
      CallOpts.IsElidableCtorThatHasNotBeenElided = true;
      [[fallthrough]];
    }
    case ConstructionContext::SimpleTemporaryObjectKind: {
      const auto *TCC = cast<TemporaryObjectConstructionContext>(CC);
      const MaterializeTemporaryExpr *MTE = TCC->getMaterializedTemporaryExpr();

      CallOpts.IsTemporaryCtorOrDtor = true;
      if (MTE) {
        if (const ValueDecl *VD = MTE->getExtendingDecl()) {
          StorageDuration SD = MTE->getStorageDuration();
          assert(SD != SD_FullExpression);
          if (!VD->getType()->isReferenceType()) {
            // We're lifetime-extended by a surrounding aggregate.
            // Automatic destructors aren't quite working in this case
            // on the CFG side. We should warn the caller about that.
            // FIXME: Is there a better way to retrieve this information from
            // the MaterializeTemporaryExpr?
            CallOpts.IsTemporaryLifetimeExtendedViaAggregate = true;
          }

          if (SD == SD_Static || SD == SD_Thread)
            return loc::MemRegionVal(
                MRMgr.getCXXStaticLifetimeExtendedObjectRegion(E, VD));

          return loc::MemRegionVal(
              MRMgr.getCXXLifetimeExtendedObjectRegion(E, VD, LCtx));
        }
        assert(MTE->getStorageDuration() == SD_FullExpression);
      }

      return loc::MemRegionVal(MRMgr.getCXXTempObjectRegion(E, LCtx));
    }
    case ConstructionContext::LambdaCaptureKind: {
      CallOpts.IsTemporaryCtorOrDtor = true;

      const auto *LCC = cast<LambdaCaptureConstructionContext>(CC);

      SVal Base = loc::MemRegionVal(
          MRMgr.getCXXTempObjectRegion(LCC->getInitializer(), LCtx));

      const auto *CE = dyn_cast_or_null<CXXConstructExpr>(E);
      if (getIndexOfElementToConstruct(State, CE, LCtx)) {
        CallOpts.IsArrayCtorOrDtor = true;
        Base = State->getLValue(E->getType(), svalBuilder.makeArrayIndex(Idx),
                                Base);
      }

      return Base;
    }
    case ConstructionContext::ArgumentKind: {
      // Arguments are technically temporaries.
      CallOpts.IsTemporaryCtorOrDtor = true;

      const auto *ACC = cast<ArgumentConstructionContext>(CC);
      const Expr *E = ACC->getCallLikeExpr();
      unsigned Idx = ACC->getIndex();

      CallEventManager &CEMgr = getStateManager().getCallEventManager();
      auto getArgLoc = [&](CallEventRef<> Caller) -> std::optional<SVal> {
        const LocationContext *FutureSFC =
            Caller->getCalleeStackFrame(BldrCtx->blockCount());
        // Return early if we are unable to reliably foresee
        // the future stack frame.
        if (!FutureSFC)
          return std::nullopt;

        // This should be equivalent to Caller->getDecl() for now, but
        // FutureSFC->getDecl() is likely to support better stuff (like
        // virtual functions) earlier.
        const Decl *CalleeD = FutureSFC->getDecl();

        // FIXME: Support for variadic arguments is not implemented here yet.
        if (CallEvent::isVariadic(CalleeD))
          return std::nullopt;

        // Operator arguments do not correspond to operator parameters
        // because this-argument is implemented as a normal argument in
        // operator call expressions but not in operator declarations.
        const TypedValueRegion *TVR = Caller->getParameterLocation(
            *Caller->getAdjustedParameterIndex(Idx), BldrCtx->blockCount());
        if (!TVR)
          return std::nullopt;

        return loc::MemRegionVal(TVR);
      };

      if (const auto *CE = dyn_cast<CallExpr>(E)) {
        CallEventRef<> Caller =
            CEMgr.getSimpleCall(CE, State, LCtx, getCFGElementRef());
        if (std::optional<SVal> V = getArgLoc(Caller))
          return *V;
        else
          break;
      } else if (const auto *CCE = dyn_cast<CXXConstructExpr>(E)) {
        // Don't bother figuring out the target region for the future
        // constructor because we won't need it.
        CallEventRef<> Caller = CEMgr.getCXXConstructorCall(
            CCE, /*Target=*/nullptr, State, LCtx, getCFGElementRef());
        if (std::optional<SVal> V = getArgLoc(Caller))
          return *V;
        else
          break;
      } else if (const auto *ME = dyn_cast<ObjCMessageExpr>(E)) {
        CallEventRef<> Caller =
            CEMgr.getObjCMethodCall(ME, State, LCtx, getCFGElementRef());
        if (std::optional<SVal> V = getArgLoc(Caller))
          return *V;
        else
          break;
      }
    }
    } // switch (CC->getKind())
  }

  // If we couldn't find an existing region to construct into, assume we're
  // constructing a temporary. Notify the caller of our failure.
  CallOpts.IsCtorOrDtorWithImproperlyModeledTargetRegion = true;
  return loc::MemRegionVal(MRMgr.getCXXTempObjectRegion(E, LCtx));
}

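// Record the freshly computed object-under-construction value V in the program
// state, keyed by the statement or initializer that will later look it up
// (DeclStmt, CXXCtorInitializer, temporary expression, lambda capture, or call
// argument), so that the construct-expression and the expressions around it
// can find their target region.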
ProgramStateRef ExprEngine::updateObjectsUnderConstruction(
    SVal V, const Expr *E, ProgramStateRef State, const LocationContext *LCtx,
    const ConstructionContext *CC, const EvalCallOptions &CallOpts) {
  if (CallOpts.IsCtorOrDtorWithImproperlyModeledTargetRegion) {
    // Sounds like we failed to find the target region and therefore
    // copy elision failed. There's nothing we can do about it here.
    return State;
  }

  // See if we're constructing an existing region by looking at the
  // current construction context.
  assert(CC && "Computed target region without construction context?");
  switch (CC->getKind()) {
  case ConstructionContext::CXX17ElidedCopyVariableKind:
  case ConstructionContext::SimpleVariableKind: {
    const auto *DSCC = cast<VariableConstructionContext>(CC);
    return addObjectUnderConstruction(State, DSCC->getDeclStmt(), LCtx, V);
  }
  case ConstructionContext::CXX17ElidedCopyConstructorInitializerKind:
  case ConstructionContext::SimpleConstructorInitializerKind: {
    const auto *ICC = cast<ConstructorInitializerConstructionContext>(CC);
    const auto *Init = ICC->getCXXCtorInitializer();
    // Base and delegating initializers handled above
    assert(Init->isAnyMemberInitializer() &&
           "Base and delegating initializers should have been handled by"
           "computeObjectUnderConstruction()");
    return addObjectUnderConstruction(State, Init, LCtx, V);
  }
  case ConstructionContext::NewAllocatedObjectKind: {
    return State;
  }
  case ConstructionContext::SimpleReturnedValueKind:
  case ConstructionContext::CXX17ElidedCopyReturnedValueKind: {
    const StackFrameContext *SFC = LCtx->getStackFrame();
    const LocationContext *CallerLCtx = SFC->getParent();
    if (!CallerLCtx) {
      // No extra work is necessary in top frame.
      return State;
    }

    auto RTC = (*SFC->getCallSiteBlock())[SFC->getIndex()]
                   .getAs<CFGCXXRecordTypedCall>();
    assert(RTC && "Could not have had a target region without it");
    if (isa<BlockInvocationContext>(CallerLCtx)) {
      // Unwrap block invocation contexts. They're mostly part of
      // the current stack frame.
      CallerLCtx = CallerLCtx->getParent();
      assert(!isa<BlockInvocationContext>(CallerLCtx));
    }

    return updateObjectsUnderConstruction(V,
        cast<Expr>(SFC->getCallSite()), State, CallerLCtx,
        RTC->getConstructionContext(), CallOpts);
  }
  case ConstructionContext::ElidedTemporaryObjectKind: {
    assert(AMgr.getAnalyzerOptions().ShouldElideConstructors);
    if (!CallOpts.IsElidableCtorThatHasNotBeenElided) {
      const auto *TCC = cast<ElidedTemporaryObjectConstructionContext>(CC);
      State = updateObjectsUnderConstruction(
          V, TCC->getConstructorAfterElision(), State, LCtx,
          TCC->getConstructionContextAfterElision(), CallOpts);

      // Remember that we've elided the constructor.
      State = addObjectUnderConstruction(
          State, TCC->getConstructorAfterElision(), LCtx, V);

      // Remember that we've elided the destructor.
      if (const auto *BTE = TCC->getCXXBindTemporaryExpr())
        State = elideDestructor(State, BTE, LCtx);

      // Instead of materialization, shamelessly return
      // the final object destination.
      if (const auto *MTE = TCC->getMaterializedTemporaryExpr())
        State = addObjectUnderConstruction(State, MTE, LCtx, V);

      return State;
    }
    // If we decided not to elide the constructor, proceed as if
    // it's a simple temporary.
    [[fallthrough]];
  }
  case ConstructionContext::SimpleTemporaryObjectKind: {
    const auto *TCC = cast<TemporaryObjectConstructionContext>(CC);
    if (const auto *BTE = TCC->getCXXBindTemporaryExpr())
      State = addObjectUnderConstruction(State, BTE, LCtx, V);

    if (const auto *MTE = TCC->getMaterializedTemporaryExpr())
      State = addObjectUnderConstruction(State, MTE, LCtx, V);

    return State;
  }
  case ConstructionContext::LambdaCaptureKind: {
    const auto *LCC = cast<LambdaCaptureConstructionContext>(CC);

    // If we capture an array, we want to store the super region, not a
    // sub-region.
    if (const auto *EL = dyn_cast_or_null<ElementRegion>(V.getAsRegion()))
      V = loc::MemRegionVal(EL->getSuperRegion());

    return addObjectUnderConstruction(
        State, {LCC->getLambdaExpr(), LCC->getIndex()}, LCtx, V);
  }
  case ConstructionContext::ArgumentKind: {
    const auto *ACC = cast<ArgumentConstructionContext>(CC);
    if (const auto *BTE = ACC->getCXXBindTemporaryExpr())
      State = addObjectUnderConstruction(State, BTE, LCtx, V);

    return addObjectUnderConstruction(
        State, {ACC->getCallLikeExpr(), ACC->getIndex()}, LCtx, V);
  }
  }
  llvm_unreachable("Unhandled construction context!");
}

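// For a constructor that is invoked from within an ArrayInitLoopExpr, bind the
// Idx-th element of the source array to the constructor's first argument in
// the Environment, so that the per-element copy/move is modeled correctly.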
static ProgramStateRef
bindRequiredArrayElementToEnvironment(ProgramStateRef State,
                                      const ArrayInitLoopExpr *AILE,
                                      const LocationContext *LCtx, NonLoc Idx) {
  SValBuilder &SVB = State->getStateManager().getSValBuilder();
  MemRegionManager &MRMgr = SVB.getRegionManager();
  ASTContext &Ctx = SVB.getContext();

  // HACK: There is no way we can put the index of the array element into the
  // CFG unless we unroll the loop, so we manually select and bind the required
  // parameter to the environment.
  const Expr *SourceArray = AILE->getCommonExpr()->getSourceExpr();
  const auto *Ctor =
      cast<CXXConstructExpr>(extractElementInitializerFromNestedAILE(AILE));

  const auto *SourceArrayRegion =
      cast<SubRegion>(State->getSVal(SourceArray, LCtx).getAsRegion());
  const ElementRegion *ElementRegion =
      MRMgr.getElementRegion(Ctor->getType(), Idx, SourceArrayRegion, Ctx);

  return State->BindExpr(Ctor->getArg(0), LCtx,
                         loc::MemRegionVal(ElementRegion));
}

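// Model a constructor call expressed either as a CXXConstructExpr or as a
// CXXInheritedCtorInitExpr: compute the target region from the construction
// context, perform zero-initialization if required, and hand the call over to
// the checkers and to inlining or conservative evaluation.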
void ExprEngine::handleConstructor(const Expr *E,
                                   ExplodedNode *Pred,
                                   ExplodedNodeSet &destNodes) {
  const auto *CE = dyn_cast<CXXConstructExpr>(E);
  const auto *CIE = dyn_cast<CXXInheritedCtorInitExpr>(E);
  assert(CE || CIE);

  const LocationContext *LCtx = Pred->getLocationContext();
  ProgramStateRef State = Pred->getState();

  SVal Target = UnknownVal();

  if (CE) {
    if (std::optional<SVal> ElidedTarget =
            getObjectUnderConstruction(State, CE, LCtx)) {
      // We've previously modeled an elidable constructor by pretending that
      // it in fact constructs into the correct target. This constructor can
      // therefore be skipped.
      Target = *ElidedTarget;
      StmtNodeBuilder Bldr(Pred, destNodes, *currBldrCtx);
      State = finishObjectConstruction(State, CE, LCtx);
      if (auto L = Target.getAs<Loc>())
        State = State->BindExpr(CE, LCtx, State->getSVal(*L, CE->getType()));
      Bldr.generateNode(CE, Pred, State);
      return;
    }
  }

  EvalCallOptions CallOpts;
  auto C = getCurrentCFGElement().getAs<CFGConstructor>();
  assert(C || getCurrentCFGElement().getAs<CFGStmt>());
  const ConstructionContext *CC = C ? C->getConstructionContext() : nullptr;

  const CXXConstructionKind CK =
      CE ? CE->getConstructionKind() : CIE->getConstructionKind();
  switch (CK) {
  case CXXConstructionKind::Complete: {
    // Inherited constructors are always base class constructors.
    assert(CE && !CIE && "A complete constructor is inherited?!");

    // If the ctor is part of an ArrayInitLoopExpr, we want to handle it
    // differently.
    auto *AILE = CC ? CC->getArrayInitLoop() : nullptr;

    unsigned Idx = 0;
    if (CE->getType()->isArrayType() || AILE) {

      auto isZeroSizeArray = [&] {
        uint64_t Size = 1;

        if (const auto *CAT = dyn_cast<ConstantArrayType>(CE->getType()))
          Size = getContext().getConstantArrayElementCount(CAT);
        else if (AILE)
          Size = getContext().getArrayInitLoopExprElementCount(AILE);

        return Size == 0;
      };

      // No element construction will happen in a 0 size array.
      if (isZeroSizeArray()) {
        StmtNodeBuilder Bldr(Pred, destNodes, *currBldrCtx);
        static SimpleProgramPointTag T{"ExprEngine",
                                       "Skipping 0 size array construction"};
        Bldr.generateNode(CE, Pred, State, &T);
        return;
      }

      Idx = getIndexOfElementToConstruct(State, CE, LCtx).value_or(0u);
      State = setIndexOfElementToConstruct(State, CE, LCtx, Idx + 1);
    }

    if (AILE) {
      // Only set this once even though we loop through it multiple times.
      if (!getPendingInitLoop(State, CE, LCtx))
        State = setPendingInitLoop(
            State, CE, LCtx,
            getContext().getArrayInitLoopExprElementCount(AILE));

      State = bindRequiredArrayElementToEnvironment(
          State, AILE, LCtx, svalBuilder.makeArrayIndex(Idx));
    }

    // The target region is found from construction context.
    std::tie(State, Target) = handleConstructionContext(
        CE, State, currBldrCtx, LCtx, CC, CallOpts, Idx);
    break;
  }
  case CXXConstructionKind::VirtualBase: {
    // Make sure we are not calling virtual base class initializers twice.
    // Only the most-derived object should initialize virtual base classes.
    const auto *OuterCtor = dyn_cast_or_null<CXXConstructExpr>(
        LCtx->getStackFrame()->getCallSite());
    assert(
        (!OuterCtor ||
         OuterCtor->getConstructionKind() == CXXConstructionKind::Complete ||
         OuterCtor->getConstructionKind() ==
             CXXConstructionKind::Delegating) &&
        ("This virtual base should have already been initialized by "
         "the most derived class!"));
    (void)OuterCtor;
    [[fallthrough]];
  }
  case CXXConstructionKind::NonVirtualBase:
    // In C++17, classes with non-virtual bases may be aggregates, so they
    // would be initialized as aggregates without a constructor call, so we may
    // have a base class constructed directly into an initializer list without
    // having the derived-class constructor call on the previous stack frame.
    // Initializer lists may be nested into more initializer lists that
    // correspond to surrounding aggregate initializations.
    // FIXME: For now this code essentially bails out. We need to find the
    // correct target region and set it.
    // FIXME: Instead of relying on the ParentMap, we should have the
    // trigger-statement (InitListExpr or CXXParenListInitExpr in this case)
    // passed down from CFG or otherwise always available during construction.
    if (isa_and_nonnull<InitListExpr, CXXParenListInitExpr>(
            LCtx->getParentMap().getParent(E))) {
      MemRegionManager &MRMgr = getSValBuilder().getRegionManager();
      Target = loc::MemRegionVal(MRMgr.getCXXTempObjectRegion(E, LCtx));
      CallOpts.IsCtorOrDtorWithImproperlyModeledTargetRegion = true;
      break;
    }
    [[fallthrough]];
  case CXXConstructionKind::Delegating: {
    const CXXMethodDecl *CurCtor = cast<CXXMethodDecl>(LCtx->getDecl());
    Loc ThisPtr = getSValBuilder().getCXXThis(CurCtor,
                                              LCtx->getStackFrame());
    SVal ThisVal = State->getSVal(ThisPtr);

    if (CK == CXXConstructionKind::Delegating) {
      Target = ThisVal;
    } else {
      // Cast to the base type.
      bool IsVirtual = (CK == CXXConstructionKind::VirtualBase);
      SVal BaseVal =
          getStoreManager().evalDerivedToBase(ThisVal, E->getType(), IsVirtual);
      Target = BaseVal;
    }
    break;
  }
  }

  if (State != Pred->getState()) {
    static SimpleProgramPointTag T("ExprEngine",
                                   "Prepare for object construction");
    ExplodedNodeSet DstPrepare;
    StmtNodeBuilder BldrPrepare(Pred, DstPrepare, *currBldrCtx);
    BldrPrepare.generateNode(E, Pred, State, &T, ProgramPoint::PreStmtKind);
    assert(DstPrepare.size() <= 1);
    if (DstPrepare.size() == 0)
      return;
    Pred = *BldrPrepare.begin();
  }

  const MemRegion *TargetRegion = Target.getAsRegion();
  CallEventManager &CEMgr = getStateManager().getCallEventManager();
  CallEventRef<> Call =
      CIE ? (CallEventRef<>)CEMgr.getCXXInheritedConstructorCall(
                CIE, TargetRegion, State, LCtx, getCFGElementRef())
          : (CallEventRef<>)CEMgr.getCXXConstructorCall(
                CE, TargetRegion, State, LCtx, getCFGElementRef());

  ExplodedNodeSet DstPreVisit;
  getCheckerManager().runCheckersForPreStmt(DstPreVisit, Pred, E, *this);

  ExplodedNodeSet PreInitialized;
  if (CE) {
    // FIXME: Is it possible and/or useful to do this before PreStmt?
    StmtNodeBuilder Bldr(DstPreVisit, PreInitialized, *currBldrCtx);
    for (ExplodedNode *N : DstPreVisit) {
      ProgramStateRef State = N->getState();
      if (CE->requiresZeroInitialization()) {
        // FIXME: Once we properly handle constructors in new-expressions,
        // we'll need to invalidate the region before setting a default value,
        // to make sure there aren't any lingering bindings around. This
        // probably needs to happen regardless of whether or not the object is
        // zero-initialized to handle random fields of a placement-initialized
        // object picking up old bindings. We might only want to do it when we
        // need to, though.
        // FIXME: This isn't actually correct for arrays -- we need to zero-
        // initialize the entire array, not just the first element -- but our
        // handling of arrays everywhere else is weak as well, so this
        // shouldn't actually make things worse. Placement new makes this
        // tricky as well, since it's then possible to be initializing one
        // part of a multi-dimensional array.
        const CXXRecordDecl *TargetHeldRecord =
            dyn_cast_or_null<CXXRecordDecl>(CE->getType()->getAsRecordDecl());

        if (!TargetHeldRecord || !TargetHeldRecord->isEmpty())
          State = State->bindDefaultZero(Target, LCtx);
      }

      Bldr.generateNode(CE, N, State, /*tag=*/nullptr,
                        ProgramPoint::PreStmtKind);
    }
  } else {
    PreInitialized = DstPreVisit;
  }

  ExplodedNodeSet DstPreCall;
  getCheckerManager().runCheckersForPreCall(DstPreCall, PreInitialized,
                                            *Call, *this);

  ExplodedNodeSet DstEvaluated;

  if (CE && CE->getConstructor()->isTrivial() &&
      CE->getConstructor()->isCopyOrMoveConstructor() &&
      !CallOpts.IsArrayCtorOrDtor) {
    StmtNodeBuilder Bldr(DstPreCall, DstEvaluated, *currBldrCtx);
    // FIXME: Handle other kinds of trivial constructors as well.
    for (ExplodedNode *N : DstPreCall)
      performTrivialCopy(Bldr, N, *Call);

  } else {
    for (ExplodedNode *N : DstPreCall)
      getCheckerManager().runCheckersForEvalCall(DstEvaluated, N, *Call, *this,
                                                 CallOpts);
  }

  // If the CFG was constructed without elements for temporary destructors
  // and the just-called constructor created a temporary object then
  // stop exploration if the temporary object has a noreturn destructor.
  // This can lose coverage because the destructor, if it were present
  // in the CFG, would be called at the end of the full expression or
  // later (for life-time extended temporaries) -- but avoids infeasible
  // paths when no-return temporary destructors are used for assertions.
  ExplodedNodeSet DstEvaluatedPostProcessed;
  StmtNodeBuilder Bldr(DstEvaluated, DstEvaluatedPostProcessed, *currBldrCtx);
  const AnalysisDeclContext *ADC = LCtx->getAnalysisDeclContext();
  if (!ADC->getCFGBuildOptions().AddTemporaryDtors) {
    if (llvm::isa_and_nonnull<CXXTempObjectRegion,
                              CXXLifetimeExtendedObjectRegion>(TargetRegion) &&
        cast<CXXConstructorDecl>(Call->getDecl())
            ->getParent()
            ->isAnyDestructorNoReturn()) {

      // If we've inlined the constructor, then DstEvaluated would be empty.
      // In this case we still want a sink, which could be implemented
      // in processCallExit. But we don't have that implemented at the moment,
      // so if you hit this assertion, see if you can avoid inlining
      // the respective constructor when analyzer-config cfg-temporary-dtors
      // is set to false.
      // Otherwise there's nothing wrong with inlining such constructor.
      assert(!DstEvaluated.empty() &&
             "We should not have inlined this constructor!");

      for (ExplodedNode *N : DstEvaluated) {
        Bldr.generateSink(E, N, N->getState());
      }

      // There is no need to run the PostCall and PostStmt checker
      // callbacks because we just generated sinks on all nodes in the
      // frontier.
      return;
    }
  }

  ExplodedNodeSet DstPostArgumentCleanup;
  for (ExplodedNode *I : DstEvaluatedPostProcessed)
    finishArgumentConstruction(DstPostArgumentCleanup, I, *Call);

  // If there were other constructors called for object-type arguments
  // of this constructor, clean them up.
  ExplodedNodeSet DstPostCall;
  getCheckerManager().runCheckersForPostCall(DstPostCall,
                                             DstPostArgumentCleanup,
                                             *Call, *this);
  getCheckerManager().runCheckersForPostStmt(destNodes, DstPostCall, E, *this);
}

void ExprEngine::VisitCXXConstructExpr(const CXXConstructExpr *CE,
                                       ExplodedNode *Pred,
                                       ExplodedNodeSet &Dst) {
  handleConstructor(CE, Pred, Dst);
}

void ExprEngine::VisitCXXInheritedCtorInitExpr(
    const CXXInheritedCtorInitExpr *CE, ExplodedNode *Pred,
    ExplodedNodeSet &Dst) {
  handleConstructor(CE, Pred, Dst);
}

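// Model an implicit or explicit destructor call for an object of type
// ObjectType that lives in the region Dest. S is the statement or expression
// that triggered the destruction, and IsBaseDtor marks the destruction of a
// base-class subobject.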
void ExprEngine::VisitCXXDestructor(QualType ObjectType,
                                    const MemRegion *Dest,
                                    const Stmt *S,
                                    bool IsBaseDtor,
                                    ExplodedNode *Pred,
                                    ExplodedNodeSet &Dst,
                                    EvalCallOptions &CallOpts) {
  assert(S && "A destructor without a trigger!");
  const LocationContext *LCtx = Pred->getLocationContext();
  ProgramStateRef State = Pred->getState();

  const CXXRecordDecl *RecordDecl = ObjectType->getAsCXXRecordDecl();
  assert(RecordDecl && "Only CXXRecordDecls should have destructors");
  const CXXDestructorDecl *DtorDecl = RecordDecl->getDestructor();
  // FIXME: There should always be a Decl, otherwise the destructor call
  // shouldn't have been added to the CFG in the first place.
  if (!DtorDecl) {
    // Skip the invalid destructor. We cannot simply return, because bailing
    // out without generating a node would cut the analysis of this path short.
    static SimpleProgramPointTag T("ExprEngine", "SkipInvalidDestructor");
    // FIXME: PostImplicitCall with a null decl may crash elsewhere anyway.
    PostImplicitCall PP(/*Decl=*/nullptr, S->getEndLoc(), LCtx,
                        getCFGElementRef(), &T);
    NodeBuilder Bldr(Pred, Dst, *currBldrCtx);
    Bldr.generateNode(PP, Pred->getState(), Pred);
    return;
  }

  if (!Dest) {
    // We're trying to destroy something that is not a region. This may happen
    // for a variety of reasons (unknown target region, concrete integer
    // instead of target region, etc.). The current code makes an attempt to
    // recover.
    // FIXME: We probably don't really need to recover when we're dealing
    // with concrete integers specifically.
    CallOpts.IsCtorOrDtorWithImproperlyModeledTargetRegion = true;
    if (const Expr *E = dyn_cast_or_null<Expr>(S)) {
      Dest = MRMgr.getCXXTempObjectRegion(E, Pred->getLocationContext());
    } else {
      static SimpleProgramPointTag T("ExprEngine", "SkipInvalidDestructor");
      NodeBuilder Bldr(Pred, Dst, *currBldrCtx);
      Bldr.generateSink(Pred->getLocation().withTag(&T),
                        Pred->getState(), Pred);
      return;
    }
  }

  CallEventManager &CEMgr = getStateManager().getCallEventManager();
  CallEventRef<CXXDestructorCall> Call = CEMgr.getCXXDestructorCall(
      DtorDecl, S, Dest, IsBaseDtor, State, LCtx, getCFGElementRef());

  PrettyStackTraceLoc CrashInfo(getContext().getSourceManager(),
                                Call->getSourceRange().getBegin(),
                                "Error evaluating destructor");

  ExplodedNodeSet DstPreCall;
  getCheckerManager().runCheckersForPreCall(DstPreCall, Pred,
                                            *Call, *this);

  ExplodedNodeSet DstInvalidated;
  StmtNodeBuilder Bldr(DstPreCall, DstInvalidated, *currBldrCtx);
  for (ExplodedNode *N : DstPreCall)
    defaultEvalCall(Bldr, N, *Call, CallOpts);

  getCheckerManager().runCheckersForPostCall(Dst, DstInvalidated,
                                             *Call, *this);
}

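// Model the call to the allocation function (operator new) itself. This
// happens before the rest of the CXXNewExpr is evaluated; the conjured return
// value is stashed as an object under construction until the CXXNewExpr is
// processed.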
void ExprEngine::VisitCXXNewAllocatorCall(const CXXNewExpr *CNE,
                                          ExplodedNode *Pred,
                                          ExplodedNodeSet &Dst) {
  ProgramStateRef State = Pred->getState();
  const LocationContext *LCtx = Pred->getLocationContext();
  PrettyStackTraceLoc CrashInfo(getContext().getSourceManager(),
                                CNE->getBeginLoc(),
                                "Error evaluating New Allocator Call");
  CallEventManager &CEMgr = getStateManager().getCallEventManager();
  CallEventRef<CXXAllocatorCall> Call =
      CEMgr.getCXXAllocatorCall(CNE, State, LCtx, getCFGElementRef());

  ExplodedNodeSet DstPreCall;
  getCheckerManager().runCheckersForPreCall(DstPreCall, Pred,
                                            *Call, *this);

  ExplodedNodeSet DstPostCall;
  StmtNodeBuilder CallBldr(DstPreCall, DstPostCall, *currBldrCtx);
  for (ExplodedNode *I : DstPreCall) {
    // Operator new calls (CXXNewExpr) are intentionally not eval-called,
    // because it does not make sense to eval-call user-provided functions.
    // 1) If the new operator can be inlined, then don't prevent it from
    //    inlining by having an eval-call of that operator.
    // 2) If it can't be inlined, then the default conservative modeling
    //    is what we want anyway.
    // So the best is to not allow eval-calling CXXNewExprs from checkers.
    // Checkers can provide their pre/post-call callbacks if needed.
    defaultEvalCall(CallBldr, I, *Call);
  }
  // If the call is inlined, DstPostCall will be empty and we bail out now.

  // Store return value of operator new() for future use, until the actual
  // CXXNewExpr gets processed.
  ExplodedNodeSet DstPostValue;
  StmtNodeBuilder ValueBldr(DstPostCall, DstPostValue, *currBldrCtx);
  for (ExplodedNode *I : DstPostCall) {
    // FIXME: Because CNE serves as the "call site" for the allocator (due to
    // lack of a better expression in the AST), the conjured return value
    // symbol is going to be of the same type (C++ object pointer type).
    // Technically this is not correct because the operator new's prototype
    // always says that it returns a 'void *'. So we should change the type of
    // the symbol, and then evaluate the cast over the symbolic pointer from
    // 'void *' to the object pointer type. But without changing the symbol's
    // type it is breaking too much to evaluate the no-op symbolic cast over
    // it, so we skip it for now.
    ProgramStateRef State = I->getState();
    SVal RetVal = State->getSVal(CNE, LCtx);
    // [basic.stc.dynamic.allocation] (on the return value of an allocation
    // function):
    // "The order, contiguity, and initial value of storage allocated by
    // successive calls to an allocation function are unspecified."
    State = State->bindDefaultInitial(RetVal, UndefinedVal{}, LCtx);

    // If this allocation function is not declared as non-throwing, failures
    // /must/ be signalled by exceptions, and thus the return value will never
    // be NULL. -fno-exceptions does not influence this semantics.
    // FIXME: GCC has a -fcheck-new option, which forces it to consider the
    // case where new can return NULL. If we end up supporting that option, we
    // can consider adding a check for it here.
    // C++11 [basic.stc.dynamic.allocation]p3.
    if (const FunctionDecl *FD = CNE->getOperatorNew()) {
      QualType Ty = FD->getType();
      if (const auto *ProtoType = Ty->getAs<FunctionProtoType>())
        if (!ProtoType->isNothrow())
          State = State->assume(RetVal.castAs<DefinedOrUnknownSVal>(), true);
    }

    ValueBldr.generateNode(
        CNE, I, addObjectUnderConstruction(State, CNE, LCtx, RetVal));
  }

  ExplodedNodeSet DstPostPostCallCallback;
  getCheckerManager().runCheckersForPostCall(DstPostPostCallCallback,
                                             DstPostValue, *Call, *this);
  for (ExplodedNode *I : DstPostPostCallCallback) {
    getCheckerManager().runCheckersForNewAllocator(*Call, Dst, I, *this);
  }
}

void ExprEngine::VisitCXXNewExpr(const CXXNewExpr *CNE, ExplodedNode *Pred,
                                 ExplodedNodeSet &Dst) {
  // FIXME: Much of this should eventually migrate to CXXAllocatorCall.
  // Also, we need to decide how allocators actually work -- they're not
  // really part of the CXXNewExpr because they happen BEFORE the
  // CXXConstructExpr subexpression. See PR12014 for some discussion.

  unsigned blockCount = currBldrCtx->blockCount();
  const LocationContext *LCtx = Pred->getLocationContext();
  SVal symVal = UnknownVal();
  FunctionDecl *FD = CNE->getOperatorNew();

  bool IsStandardGlobalOpNewFunction =
      FD->isReplaceableGlobalAllocationFunction();

  ProgramStateRef State = Pred->getState();

  // Retrieve the stored operator new() return value.
  if (AMgr.getAnalyzerOptions().MayInlineCXXAllocator) {
    symVal = *getObjectUnderConstruction(State, CNE, LCtx);
    State = finishObjectConstruction(State, CNE, LCtx);
  }

  // We assume that all standard global 'operator new' functions allocate
  // memory on the heap. We realize this is an approximation that might not
  // correctly model a custom global allocator.
  if (symVal.isUnknown()) {
    if (IsStandardGlobalOpNewFunction)
      symVal = svalBuilder.getConjuredHeapSymbolVal(getCFGElementRef(), LCtx,
                                                    CNE->getType(), blockCount);
    else
      symVal = svalBuilder.conjureSymbolVal(
          /*symbolTag=*/nullptr, getCFGElementRef(), LCtx, blockCount);
  }

  CallEventManager &CEMgr = getStateManager().getCallEventManager();
  CallEventRef<CXXAllocatorCall> Call =
      CEMgr.getCXXAllocatorCall(CNE, State, LCtx, getCFGElementRef());

  if (!AMgr.getAnalyzerOptions().MayInlineCXXAllocator) {
    // Invalidate placement args.
    // FIXME: Once we figure out how we want allocators to work,
    // we should be using the usual pre-/(default-)eval-/post-call checkers
    // here.
    State = Call->invalidateRegions(blockCount, State);
    if (!State)
      return;

    // If this allocation function is not declared as non-throwing, failures
    // /must/ be signalled by exceptions, and thus the return value will never
    // be NULL. -fno-exceptions does not influence this semantics.
    // FIXME: GCC has a -fcheck-new option, which forces it to consider the
    // case where new can return NULL. If we end up supporting that option, we
    // can consider adding a check for it here.
    // C++11 [basic.stc.dynamic.allocation]p3.
    if (const auto *ProtoType = FD->getType()->getAs<FunctionProtoType>())
      if (!ProtoType->isNothrow())
        if (auto dSymVal = symVal.getAs<DefinedOrUnknownSVal>())
          State = State->assume(*dSymVal, true);
  }

  StmtNodeBuilder Bldr(Pred, Dst, *currBldrCtx);

  SVal Result = symVal;

  if (CNE->isArray()) {

    if (const auto *NewReg = cast_or_null<SubRegion>(symVal.getAsRegion())) {
      // If each element is initialized by its default constructor, the field
      // values are properly placed inside the required region; however, if an
      // initializer list is used, this doesn't happen automatically.
      auto *Init = CNE->getInitializer();
      bool isInitList =
          isa_and_nonnull<InitListExpr, CXXParenListInitExpr>(Init);

      QualType ObjTy =
          isInitList ? Init->getType() : CNE->getType()->getPointeeType();
      const ElementRegion *EleReg =
          MRMgr.getElementRegion(ObjTy, svalBuilder.makeArrayIndex(0), NewReg,
                                 svalBuilder.getContext());
      Result = loc::MemRegionVal(EleReg);

      // If the array is list initialized, we bind the initializer list to the
      // memory region here, otherwise we would lose it.
      if (isInitList) {
        Bldr.takeNodes(Pred);
        Pred = Bldr.generateNode(CNE, Pred, State);

        SVal V = State->getSVal(Init, LCtx);
        ExplodedNodeSet evaluated;
        evalBind(evaluated, CNE, Pred, Result, V, true);

        Bldr.takeNodes(Pred);
        Bldr.addNodes(evaluated);

        Pred = *evaluated.begin();
        State = Pred->getState();
      }
    }

    State = State->BindExpr(CNE, Pred->getLocationContext(), Result);
    Bldr.generateNode(CNE, Pred, State);
    return;
  }

  // FIXME: Once we have proper support for CXXConstructExprs inside
  // CXXNewExpr, we need to make sure that the constructed object is not
  // immediately invalidated here. (The placement call should happen before
  // the constructor call anyway.)
  if (FD->isReservedGlobalPlacementOperator()) {
    // Non-array placement new should always return the placement location.
    SVal PlacementLoc = State->getSVal(CNE->getPlacementArg(0), LCtx);
    Result = svalBuilder.evalCast(PlacementLoc, CNE->getType(),
                                  CNE->getPlacementArg(0)->getType());
  }

  // Bind the address of the object, then check to see if we cached out.
  State = State->BindExpr(CNE, LCtx, Result);
  ExplodedNode *NewN = Bldr.generateNode(CNE, Pred, State);
  if (!NewN)
    return;

  // If the type is not a record, we won't have a CXXConstructExpr as an
  // initializer. Copy the value over.
  if (const Expr *Init = CNE->getInitializer()) {
    if (!isa<CXXConstructExpr>(Init)) {
      assert(Bldr.getResults().size() == 1);
      Bldr.takeNodes(NewN);
      evalBind(Dst, CNE, NewN, Result, State->getSVal(Init, LCtx),
               /*FirstInit=*/IsStandardGlobalOpNewFunction);
    }
  }
}

void ExprEngine::VisitCXXDeleteExpr(const CXXDeleteExpr *CDE,
                                    ExplodedNode *Pred, ExplodedNodeSet &Dst) {

  CallEventManager &CEMgr = getStateManager().getCallEventManager();
  CallEventRef<CXXDeallocatorCall> Call = CEMgr.getCXXDeallocatorCall(
      CDE, Pred->getState(), Pred->getLocationContext(), getCFGElementRef());

  ExplodedNodeSet DstPreCall;
  getCheckerManager().runCheckersForPreCall(DstPreCall, Pred, *Call, *this);
  ExplodedNodeSet DstPostCall;

  if (AMgr.getAnalyzerOptions().MayInlineCXXAllocator) {
    StmtNodeBuilder Bldr(DstPreCall, DstPostCall, *currBldrCtx);
    for (ExplodedNode *I : DstPreCall) {
      // Intentionally either inline or conservatively evaluate the operator
      // delete call, but avoid triggering an eval-call event for checkers.
      // As explained for CXXNewExpr above, it does not really make sense to
      // eval-call user-provided functions.
      defaultEvalCall(Bldr, I, *Call);
    }
  } else {
    DstPostCall = DstPreCall;
  }
  getCheckerManager().runCheckersForPostCall(Dst, DstPostCall, *Call, *this);
}

void ExprEngine::VisitCXXCatchStmt(const CXXCatchStmt *CS, ExplodedNode *Pred,
                                   ExplodedNodeSet &Dst) {
  const VarDecl *VD = CS->getExceptionDecl();
  if (!VD) {
    Dst.Add(Pred);
    return;
  }

  const LocationContext *LCtx = Pred->getLocationContext();
  SVal V = svalBuilder.conjureSymbolVal(getCFGElementRef(), LCtx, VD->getType(),
                                        currBldrCtx->blockCount());
  ProgramStateRef state = Pred->getState();
  state = state->bindLoc(state->getLValue(VD, LCtx), V, LCtx);

  StmtNodeBuilder Bldr(Pred, Dst, *currBldrCtx);
  Bldr.generateNode(CS, Pred, state);
}

void ExprEngine::VisitCXXThisExpr(const CXXThisExpr *TE, ExplodedNode *Pred,
                                  ExplodedNodeSet &Dst) {
  StmtNodeBuilder Bldr(Pred, Dst, *currBldrCtx);

  // Get the this object region from StoreManager.
  const LocationContext *LCtx = Pred->getLocationContext();
  const MemRegion *R =
      svalBuilder.getRegionManager().getCXXThisRegion(
          getContext().getCanonicalType(TE->getType()),
          LCtx);

  ProgramStateRef state = Pred->getState();
  SVal V = state->getSVal(loc::MemRegionVal(R));
  Bldr.generateNode(TE, Pred, state->BindExpr(TE, LCtx, V));
}

void ExprEngine::VisitLambdaExpr(const LambdaExpr *LE, ExplodedNode *Pred,
                                 ExplodedNodeSet &Dst) {
  const LocationContext *LocCtxt = Pred->getLocationContext();

  // Get the region of the lambda itself.
  const MemRegion *R = svalBuilder.getRegionManager().getCXXTempObjectRegion(
      LE, LocCtxt);
  SVal V = loc::MemRegionVal(R);

  ProgramStateRef State = Pred->getState();

  // If we created a new MemRegion for the lambda, we should explicitly bind
  // the captures.
  for (auto const [Idx, FieldForCapture, InitExpr] :
       llvm::zip(llvm::seq<unsigned>(0, -1), LE->getLambdaClass()->fields(),
                 LE->capture_inits())) {
    SVal FieldLoc = State->getLValue(FieldForCapture, V);

    SVal InitVal;
    if (!FieldForCapture->hasCapturedVLAType()) {
      assert(InitExpr && "Capture missing initialization expression");

      // Capturing a 0 length array is a no-op, so we ignore it to get a more
      // accurate analysis. If it's not ignored, it would set the default
      // binding of the lambda to 'Unknown', which can lead to falsely
      // detecting 'Uninitialized' values as 'Unknown' and not reporting a
      // warning.
      const auto FTy = FieldForCapture->getType();
      if (FTy->isConstantArrayType() &&
          getContext().getConstantArrayElementCount(
              getContext().getAsConstantArrayType(FTy)) == 0)
        continue;

      // With C++17 copy elision the InitExpr can be anything, so instead of
      // pattern matching all cases, we simply check whether the current field
      // is under construction or not, regardless of what its InitExpr is.
      if (const auto OUC =
              getObjectUnderConstruction(State, {LE, Idx}, LocCtxt)) {
        InitVal = State->getSVal(OUC->getAsRegion());

        State = finishObjectConstruction(State, {LE, Idx}, LocCtxt);
      } else
        InitVal = State->getSVal(InitExpr, LocCtxt);

    } else {

      assert(!getObjectUnderConstruction(State, {LE, Idx}, LocCtxt) &&
             "VLA capture by value is a compile time error!");

      // The field stores the length of a captured variable-length array.
      // These captures don't have initialization expressions; instead we
      // get the length from the VLAType size expression.
      Expr *SizeExpr = FieldForCapture->getCapturedVLAType()->getSizeExpr();
      InitVal = State->getSVal(SizeExpr, LocCtxt);
    }

    State = State->bindLoc(FieldLoc, InitVal, LocCtxt);
  }

  // Decay the Loc into an RValue, because there might be a
  // MaterializeTemporaryExpr node above this one which expects the bound value
  // to be an RValue.
  SVal LambdaRVal = State->getSVal(R);

  ExplodedNodeSet Tmp;
  StmtNodeBuilder Bldr(Pred, Tmp, *currBldrCtx);
  // FIXME: is this the right program point kind?
  Bldr.generateNode(LE, Pred,
                    State->BindExpr(LE, LocCtxt, LambdaRVal),
                    nullptr, ProgramPoint::PostLValueKind);

  // FIXME: Move all post/pre visits to ::Visit().
  getCheckerManager().runCheckersForPostStmt(Dst, Tmp, LE, *this);
}

void ExprEngine::VisitAttributedStmt(const AttributedStmt *A,
                                     ExplodedNode *Pred, ExplodedNodeSet &Dst) {
  ExplodedNodeSet CheckerPreStmt;
  getCheckerManager().runCheckersForPreStmt(CheckerPreStmt, Pred, A, *this);

  ExplodedNodeSet EvalSet;
  StmtNodeBuilder Bldr(CheckerPreStmt, EvalSet, *currBldrCtx);

  for (const auto *Attr : getSpecificAttrs<CXXAssumeAttr>(A->getAttrs())) {
    for (ExplodedNode *N : CheckerPreStmt) {
      Visit(Attr->getAssumption()->IgnoreParens(), N, EvalSet);
    }
  }

  getCheckerManager().runCheckersForPostStmt(Dst, EvalSet, A, *this);
}