1//===- ExprEngineCXX.cpp - ExprEngine support for C++ -----------*- C++ -*-===//
2//
3// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
4// See https://llvm.org/LICENSE.txt for license information.
5// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
6//
7//===----------------------------------------------------------------------===//
8//
9// This file defines the C++ expression evaluation engine.
10//
11//===----------------------------------------------------------------------===//
12
13#include "clang/AST/ASTContext.h"
14#include "clang/AST/AttrIterator.h"
15#include "clang/AST/DeclCXX.h"
16#include "clang/AST/ParentMap.h"
17#include "clang/AST/StmtCXX.h"
18#include "clang/Analysis/ConstructionContext.h"
19#include "clang/Basic/PrettyStackTrace.h"
20#include "clang/StaticAnalyzer/Core/CheckerManager.h"
21#include "clang/StaticAnalyzer/Core/PathSensitive/AnalysisManager.h"
22#include "clang/StaticAnalyzer/Core/PathSensitive/CallEvent.h"
23#include "clang/StaticAnalyzer/Core/PathSensitive/ExprEngine.h"
24#include "clang/StaticAnalyzer/Core/PathSensitive/SVals.h"
25#include "llvm/ADT/STLExtras.h"
26#include "llvm/ADT/Sequence.h"
27#include "llvm/Support/Casting.h"
28#include <optional>
29
30using namespace clang;
31using namespace ento;
32
33void ExprEngine::CreateCXXTemporaryObject(const MaterializeTemporaryExpr *ME,
34 ExplodedNode *Pred,
35 ExplodedNodeSet &Dst) {
36 NodeBuilder Bldr(Pred, Dst, *currBldrCtx);
37 const Expr *tempExpr = ME->getSubExpr()->IgnoreParens();
38 ProgramStateRef state = Pred->getState();
39 const LocationContext *LCtx = Pred->getLocationContext();
40
41 state = createTemporaryRegionIfNeeded(State: state, LC: LCtx, InitWithAdjustments: tempExpr, Result: ME);
42 Bldr.generateNode(S: ME, Pred, St: state);
43}
44
// FIXME: This is the sort of code that should eventually live in a Core
// checker rather than as a special case in ExprEngine.
//
// Models a trivial copy/move constructor or a trivial copy/move operator=
// as a plain load from the source object followed by a bind to the
// destination, instead of inlining the (trivial) callee.
void ExprEngine::performTrivialCopy(NodeBuilder &Bldr, ExplodedNode *Pred,
                                    const CallEvent &Call) {
  SVal ThisVal;
  bool AlwaysReturnsLValue;
  [[maybe_unused]] const CXXRecordDecl *ThisRD = nullptr;
  if (const CXXConstructorCall *Ctor = dyn_cast<CXXConstructorCall>(Val: &Call)) {
    // Trivial copy/move construction: the call yields a prvalue.
    assert(Ctor->getDecl()->isTrivial());
    assert(Ctor->getDecl()->isCopyOrMoveConstructor());
    ThisVal = Ctor->getCXXThisVal();
    ThisRD = Ctor->getDecl()->getParent();
    AlwaysReturnsLValue = false;
  } else {
    // Trivial operator=: per C++ semantics it returns *this, an lvalue.
    assert(cast<CXXMethodDecl>(Call.getDecl())->isTrivial());
    assert(cast<CXXMethodDecl>(Call.getDecl())->getOverloadedOperator() ==
           OO_Equal);
    ThisVal = cast<CXXInstanceCall>(Val: Call).getCXXThisVal();
    ThisRD = cast<CXXMethodDecl>(Val: Call.getDecl())->getParent();
    AlwaysReturnsLValue = true;
  }

  const LocationContext *LCtx = Pred->getLocationContext();
  const Expr *CallExpr = Call.getOriginExpr();

  ExplodedNodeSet Dst;
  Bldr.takeNodes(N: Pred);

  assert(ThisRD);

  if (!ThisRD->isEmpty()) {
    // Copy the first argument (the source object) into *this.
    SVal V = Call.getArgSVal(Index: 0);
    const Expr *VExpr = Call.getArgExpr(Index: 0);

    // If the value being copied is not unknown, load from its location to get
    // an aggregate rvalue.
    if (std::optional<Loc> L = V.getAs<Loc>())
      V = Pred->getState()->getSVal(LV: *L);
    else
      assert(V.isUnknownOrUndef());

    ExplodedNodeSet Tmp;
    evalLocation(Dst&: Tmp, NodeEx: CallExpr, BoundEx: VExpr, Pred, St: Pred->getState(), location: V,
                 /*isLoad=*/true);
    for (ExplodedNode *N : Tmp)
      evalBind(Dst, StoreE: CallExpr, Pred: N, location: ThisVal, Val: V, AtDeclInit: !AlwaysReturnsLValue);
  } else {
    // We can't copy empty classes because of empty base class optimization.
    // In that case, copying the empty base class subobject would overwrite the
    // object that it overlaps with - so let's not do that.
    // See issue-157467.cpp for an example.
    Dst.Add(N: Pred);
  }

  // Bind the call expression to its result: the destination lvalue for
  // operator=, or the computed return value for a constructor call.
  PostStmt PS(CallExpr, LCtx);
  for (ExplodedNode *N : Dst) {
    ProgramStateRef State = N->getState();
    if (AlwaysReturnsLValue)
      State = State->BindExpr(S: CallExpr, LCtx, V: ThisVal);
    else
      State = bindReturnValue(Call, LCtx, State);
    Bldr.generateNode(PP: PS, State, Pred: N);
  }
}
109
110SVal ExprEngine::makeElementRegion(ProgramStateRef State, SVal LValue,
111 QualType &Ty, bool &IsArray, unsigned Idx) {
112 SValBuilder &SVB = State->getStateManager().getSValBuilder();
113 ASTContext &Ctx = SVB.getContext();
114
115 if (const ArrayType *AT = Ctx.getAsArrayType(T: Ty)) {
116 while (AT) {
117 Ty = AT->getElementType();
118 AT = dyn_cast<ArrayType>(Val: AT->getElementType());
119 }
120 LValue = State->getLValue(ElementType: Ty, Idx: SVB.makeArrayIndex(idx: Idx), Base: LValue);
121 IsArray = true;
122 }
123
124 return LValue;
125}
126
// In case when the prvalue is returned from the function (kind is one of
// SimpleReturnedValueKind, CXX17ElidedCopyReturnedValueKind), then
// it's materialization happens in context of the caller.
// We pass BldrCtx explicitly, as currBldrCtx always refers to callee's context.
//
// Computes the region (or value) that the object denoted by E is constructed
// into, as determined by the construction context CC. CallOpts is updated
// with flags describing peculiarities of the target (array element,
// temporary, lifetime-extended, improperly modeled, ...). Idx selects the
// element when constructing into an array.
SVal ExprEngine::computeObjectUnderConstruction(
    const Expr *E, ProgramStateRef State, const NodeBuilderContext *BldrCtx,
    const LocationContext *LCtx, const ConstructionContext *CC,
    EvalCallOptions &CallOpts, unsigned Idx) {

  SValBuilder &SVB = getSValBuilder();
  MemRegionManager &MRMgr = SVB.getRegionManager();
  ASTContext &ACtx = SVB.getContext();

  // Compute the target region by exploring the construction context.
  if (CC) {
    switch (CC->getKind()) {
    case ConstructionContext::CXX17ElidedCopyVariableKind:
    case ConstructionContext::SimpleVariableKind: {
      // Constructing directly into a local variable declared by a DeclStmt.
      const auto *DSCC = cast<VariableConstructionContext>(Val: CC);
      const auto *DS = DSCC->getDeclStmt();
      const auto *Var = cast<VarDecl>(Val: DS->getSingleDecl());
      QualType Ty = Var->getType();
      return makeElementRegion(State, LValue: State->getLValue(VD: Var, LC: LCtx), Ty,
                               IsArray&: CallOpts.IsArrayCtorOrDtor, Idx);
    }
    case ConstructionContext::CXX17ElidedCopyConstructorInitializerKind:
    case ConstructionContext::SimpleConstructorInitializerKind: {
      // Constructing a base class, member, or delegated-to object from a
      // constructor's initializer list.
      const auto *ICC = cast<ConstructorInitializerConstructionContext>(Val: CC);
      const auto *Init = ICC->getCXXCtorInitializer();
      const CXXMethodDecl *CurCtor = cast<CXXMethodDecl>(Val: LCtx->getDecl());
      Loc ThisPtr = SVB.getCXXThis(D: CurCtor, SFC: LCtx->getStackFrame());
      SVal ThisVal = State->getSVal(LV: ThisPtr);
      if (Init->isBaseInitializer()) {
        // Base subobject: carve the base-object region out of *this.
        const auto *ThisReg = cast<SubRegion>(Val: ThisVal.getAsRegion());
        const CXXRecordDecl *BaseClass =
            Init->getBaseClass()->getAsCXXRecordDecl();
        const auto *BaseReg =
            MRMgr.getCXXBaseObjectRegion(BaseClass, Super: ThisReg,
                                         IsVirtual: Init->isBaseVirtual());
        return SVB.makeLoc(region: BaseReg);
      }
      if (Init->isDelegatingInitializer())
        // Delegating constructor: the target is the whole *this object.
        return ThisVal;

      // Otherwise this is a (possibly indirect) member initializer.
      const ValueDecl *Field;
      SVal FieldVal;
      if (Init->isIndirectMemberInitializer()) {
        Field = Init->getIndirectMember();
        FieldVal = State->getLValue(decl: Init->getIndirectMember(), Base: ThisVal);
      } else {
        Field = Init->getMember();
        FieldVal = State->getLValue(decl: Init->getMember(), Base: ThisVal);
      }

      QualType Ty = Field->getType();
      return makeElementRegion(State, LValue: FieldVal, Ty, IsArray&: CallOpts.IsArrayCtorOrDtor,
                               Idx);
    }
    case ConstructionContext::NewAllocatedObjectKind: {
      // Constructing into storage produced by operator new.
      if (AMgr.getAnalyzerOptions().MayInlineCXXAllocator) {
        const auto *NECC = cast<NewAllocatedObjectConstructionContext>(Val: CC);
        const auto *NE = NECC->getCXXNewExpr();
        SVal V = *getObjectUnderConstruction(State, Item: NE, LC: LCtx);
        if (const SubRegion *MR =
                dyn_cast_or_null<SubRegion>(Val: V.getAsRegion())) {
          if (NE->isArray()) {
            // new[]: construct the Idx-th element of the allocated array.
            CallOpts.IsArrayCtorOrDtor = true;

            auto Ty = NE->getType()->getPointeeType();
            while (const auto *AT = getContext().getAsArrayType(T: Ty))
              Ty = AT->getElementType();

            auto R = MRMgr.getElementRegion(elementType: Ty, Idx: svalBuilder.makeArrayIndex(idx: Idx),
                                            superRegion: MR, Ctx: SVB.getContext());

            return loc::MemRegionVal(R);
          }
          return V;
        }
        // TODO: Detect when the allocator returns a null pointer.
        // Constructor shall not be called in this case.
      }
      break;
    }
    case ConstructionContext::SimpleReturnedValueKind:
    case ConstructionContext::CXX17ElidedCopyReturnedValueKind: {
      // The temporary is to be managed by the parent stack frame.
      // So build it in the parent stack frame if we're not in the
      // top frame of the analysis.
      const StackFrameContext *SFC = LCtx->getStackFrame();
      if (const LocationContext *CallerLCtx = SFC->getParent()) {
        auto RTC = (*SFC->getCallSiteBlock())[SFC->getIndex()]
                       .getAs<CFGCXXRecordTypedCall>();
        if (!RTC) {
          // We were unable to find the correct construction context for the
          // call in the parent stack frame. This is equivalent to not being
          // able to find construction context at all.
          break;
        }
        if (isa<BlockInvocationContext>(Val: CallerLCtx)) {
          // Unwrap block invocation contexts. They're mostly part of
          // the current stack frame.
          CallerLCtx = CallerLCtx->getParent();
          assert(!isa<BlockInvocationContext>(CallerLCtx));
        }

        // Recurse into the caller's context to resolve where the returned
        // object actually lives.
        NodeBuilderContext CallerBldrCtx(getCoreEngine(),
                                         SFC->getCallSiteBlock(), CallerLCtx);
        return computeObjectUnderConstruction(
            E: cast<Expr>(Val: SFC->getCallSite()), State, BldrCtx: &CallerBldrCtx, LCtx: CallerLCtx,
            CC: RTC->getConstructionContext(), CallOpts);
      } else {
        // We are on the top frame of the analysis. We do not know where is the
        // object returned to. Conjure a symbolic region for the return value.
        // TODO: We probably need a new MemRegion kind to represent the storage
        // of that SymbolicRegion, so that we could produce a fancy symbol
        // instead of an anonymous conjured symbol.
        // TODO: Do we need to track the region to avoid having it dead
        // too early? It does die too early, at least in C++17, but because
        // putting anything into a SymbolicRegion causes an immediate escape,
        // it doesn't cause any leak false positives.
        const auto *RCC = cast<ReturnedValueConstructionContext>(Val: CC);
        // Make sure that this doesn't coincide with any other symbol
        // conjured for the returned expression.
        static const int TopLevelSymRegionTag = 0;
        const Expr *RetE = RCC->getReturnStmt()->getRetValue();
        assert(RetE && "Void returns should not have a construction context");
        QualType ReturnTy = RetE->getType();
        QualType RegionTy = ACtx.getPointerType(T: ReturnTy);
        return SVB.conjureSymbolVal(symbolTag: &TopLevelSymRegionTag, elem: getCFGElementRef(),
                                    LCtx: SFC, type: RegionTy, count: currBldrCtx->blockCount());
      }
      llvm_unreachable("Unhandled return value construction context!");
    }
    case ConstructionContext::ElidedTemporaryObjectKind: {
      assert(AMgr.getAnalyzerOptions().ShouldElideConstructors);
      const auto *TCC = cast<ElidedTemporaryObjectConstructionContext>(Val: CC);

      // Support pre-C++17 copy elision. We'll have the elidable copy
      // constructor in the AST and in the CFG, but we'll skip it
      // and construct directly into the final object. This call
      // also sets the CallOpts flags for us.
      // If the elided copy/move constructor is not supported, there's still
      // benefit in trying to model the non-elided constructor.
      // Stash our state before trying to elide, as it'll get overwritten.
      ProgramStateRef PreElideState = State;
      EvalCallOptions PreElideCallOpts = CallOpts;

      SVal V = computeObjectUnderConstruction(
          E: TCC->getConstructorAfterElision(), State, BldrCtx, LCtx,
          CC: TCC->getConstructionContextAfterElision(), CallOpts);

      // FIXME: This definition of "copy elision has not failed" is unreliable.
      // It doesn't indicate that the constructor will actually be inlined
      // later; this is still up to evalCall() to decide.
      if (!CallOpts.IsCtorOrDtorWithImproperlyModeledTargetRegion)
        return V;

      // Copy elision failed. Revert the changes and proceed as if we have
      // a simple temporary.
      CallOpts = PreElideCallOpts;
      CallOpts.IsElidableCtorThatHasNotBeenElided = true;
      [[fallthrough]];
    }
    case ConstructionContext::SimpleTemporaryObjectKind: {
      // Constructing a plain temporary, possibly lifetime-extended via a
      // MaterializeTemporaryExpr.
      const auto *TCC = cast<TemporaryObjectConstructionContext>(Val: CC);
      const MaterializeTemporaryExpr *MTE = TCC->getMaterializedTemporaryExpr();

      CallOpts.IsTemporaryCtorOrDtor = true;
      if (MTE) {
        if (const ValueDecl *VD = MTE->getExtendingDecl()) {
          StorageDuration SD = MTE->getStorageDuration();
          assert(SD != SD_FullExpression);
          if (!VD->getType()->isReferenceType()) {
            // We're lifetime-extended by a surrounding aggregate.
            // Automatic destructors aren't quite working in this case
            // on the CFG side. We should warn the caller about that.
            // FIXME: Is there a better way to retrieve this information from
            // the MaterializeTemporaryExpr?
            CallOpts.IsTemporaryLifetimeExtendedViaAggregate = true;
          }

          if (SD == SD_Static || SD == SD_Thread)
            return loc::MemRegionVal(
                MRMgr.getCXXStaticLifetimeExtendedObjectRegion(Ex: E, VD));

          return loc::MemRegionVal(
              MRMgr.getCXXLifetimeExtendedObjectRegion(Ex: E, VD, LC: LCtx));
        }
        assert(MTE->getStorageDuration() == SD_FullExpression);
      }

      return loc::MemRegionVal(MRMgr.getCXXTempObjectRegion(Ex: E, LC: LCtx));
    }
    case ConstructionContext::LambdaCaptureKind: {
      // Constructing into a by-value lambda capture.
      CallOpts.IsTemporaryCtorOrDtor = true;

      const auto *LCC = cast<LambdaCaptureConstructionContext>(Val: CC);

      SVal Base = loc::MemRegionVal(
          MRMgr.getCXXTempObjectRegion(Ex: LCC->getInitializer(), LC: LCtx));

      const auto *CE = dyn_cast_or_null<CXXConstructExpr>(Val: E);
      if (getIndexOfElementToConstruct(State, E: CE, LCtx)) {
        // Capturing an array: construct element Idx of the captured copy.
        CallOpts.IsArrayCtorOrDtor = true;
        Base = State->getLValue(ElementType: E->getType(), Idx: svalBuilder.makeArrayIndex(idx: Idx),
                                Base);
      }

      return Base;
    }
    case ConstructionContext::ArgumentKind: {
      // Arguments are technically temporaries.
      CallOpts.IsTemporaryCtorOrDtor = true;

      const auto *ACC = cast<ArgumentConstructionContext>(Val: CC);
      const Expr *E = ACC->getCallLikeExpr();
      unsigned Idx = ACC->getIndex();

      // Try to construct directly into the future callee's parameter region,
      // if we can reliably predict the callee's stack frame.
      CallEventManager &CEMgr = getStateManager().getCallEventManager();
      auto getArgLoc = [&](CallEventRef<> Caller) -> std::optional<SVal> {
        const LocationContext *FutureSFC =
            Caller->getCalleeStackFrame(BlockCount: BldrCtx->blockCount());
        // Return early if we are unable to reliably foresee
        // the future stack frame.
        if (!FutureSFC)
          return std::nullopt;

        // This should be equivalent to Caller->getDecl() for now, but
        // FutureSFC->getDecl() is likely to support better stuff (like
        // virtual functions) earlier.
        const Decl *CalleeD = FutureSFC->getDecl();

        // FIXME: Support for variadic arguments is not implemented here yet.
        if (CallEvent::isVariadic(D: CalleeD))
          return std::nullopt;

        // Operator arguments do not correspond to operator parameters
        // because this-argument is implemented as a normal argument in
        // operator call expressions but not in operator declarations.
        const TypedValueRegion *TVR = Caller->getParameterLocation(
            Index: *Caller->getAdjustedParameterIndex(ASTArgumentIndex: Idx), BlockCount: BldrCtx->blockCount());
        if (!TVR)
          return std::nullopt;

        return loc::MemRegionVal(TVR);
      };

      if (const auto *CE = dyn_cast<CallExpr>(Val: E)) {
        CallEventRef<> Caller =
            CEMgr.getSimpleCall(E: CE, State, LCtx, ElemRef: getCFGElementRef());
        if (std::optional<SVal> V = getArgLoc(Caller))
          return *V;
        else
          break;
      } else if (const auto *CCE = dyn_cast<CXXConstructExpr>(Val: E)) {
        // Don't bother figuring out the target region for the future
        // constructor because we won't need it.
        CallEventRef<> Caller = CEMgr.getCXXConstructorCall(
            E: CCE, /*Target=*/nullptr, State, LCtx, ElemRef: getCFGElementRef());
        if (std::optional<SVal> V = getArgLoc(Caller))
          return *V;
        else
          break;
      } else if (const auto *ME = dyn_cast<ObjCMessageExpr>(Val: E)) {
        CallEventRef<> Caller =
            CEMgr.getObjCMethodCall(E: ME, State, LCtx, ElemRef: getCFGElementRef());
        if (std::optional<SVal> V = getArgLoc(Caller))
          return *V;
        else
          break;
      }
    }
    } // switch (CC->getKind())
  }

  // If we couldn't find an existing region to construct into, assume we're
  // constructing a temporary. Notify the caller of our failure.
  CallOpts.IsCtorOrDtorWithImproperlyModeledTargetRegion = true;
  return loc::MemRegionVal(MRMgr.getCXXTempObjectRegion(Ex: E, LC: LCtx));
}
408
// Records in the program state that V is the object under construction for
// the construction-context item derived from CC (DeclStmt, ctor initializer,
// bind/materialize temporary expression, lambda capture, call argument, ...),
// so that subsequent statements can look it up. Mirrors the switch in
// computeObjectUnderConstruction().
ProgramStateRef ExprEngine::updateObjectsUnderConstruction(
    SVal V, const Expr *E, ProgramStateRef State, const LocationContext *LCtx,
    const ConstructionContext *CC, const EvalCallOptions &CallOpts) {
  if (CallOpts.IsCtorOrDtorWithImproperlyModeledTargetRegion) {
    // Sounds like we failed to find the target region and therefore
    // copy elision failed. There's nothing we can do about it here.
    return State;
  }

  // See if we're constructing an existing region by looking at the
  // current construction context.
  assert(CC && "Computed target region without construction context?");
  switch (CC->getKind()) {
  case ConstructionContext::CXX17ElidedCopyVariableKind:
  case ConstructionContext::SimpleVariableKind: {
    // Track the object by the variable's DeclStmt.
    const auto *DSCC = cast<VariableConstructionContext>(Val: CC);
    return addObjectUnderConstruction(State, Item: DSCC->getDeclStmt(), LC: LCtx, V);
  }
  case ConstructionContext::CXX17ElidedCopyConstructorInitializerKind:
  case ConstructionContext::SimpleConstructorInitializerKind: {
    // Track the object by its member initializer.
    const auto *ICC = cast<ConstructorInitializerConstructionContext>(Val: CC);
    const auto *Init = ICC->getCXXCtorInitializer();
    // Base and delegating initializers handled above
    assert(Init->isAnyMemberInitializer() &&
           "Base and delegating initializers should have been handled by"
           "computeObjectUnderConstruction()");
    return addObjectUnderConstruction(State, Item: Init, LC: LCtx, V);
  }
  case ConstructionContext::NewAllocatedObjectKind: {
    // The new-expression itself is already tracked; nothing extra to record.
    return State;
  }
  case ConstructionContext::SimpleReturnedValueKind:
  case ConstructionContext::CXX17ElidedCopyReturnedValueKind: {
    // The returned object belongs to the caller; record it in the caller's
    // context, mirroring computeObjectUnderConstruction().
    const StackFrameContext *SFC = LCtx->getStackFrame();
    const LocationContext *CallerLCtx = SFC->getParent();
    if (!CallerLCtx) {
      // No extra work is necessary in top frame.
      return State;
    }

    auto RTC = (*SFC->getCallSiteBlock())[SFC->getIndex()]
                   .getAs<CFGCXXRecordTypedCall>();
    assert(RTC && "Could not have had a target region without it");
    if (isa<BlockInvocationContext>(Val: CallerLCtx)) {
      // Unwrap block invocation contexts. They're mostly part of
      // the current stack frame.
      CallerLCtx = CallerLCtx->getParent();
      assert(!isa<BlockInvocationContext>(CallerLCtx));
    }

    return updateObjectsUnderConstruction(V,
        E: cast<Expr>(Val: SFC->getCallSite()), State, LCtx: CallerLCtx,
        CC: RTC->getConstructionContext(), CallOpts);
  }
  case ConstructionContext::ElidedTemporaryObjectKind: {
    assert(AMgr.getAnalyzerOptions().ShouldElideConstructors);
    if (!CallOpts.IsElidableCtorThatHasNotBeenElided) {
      // Elision succeeded: record the object for the post-elision context
      // as well as for the elided constructor, bind-temporary, and
      // materialization expressions.
      const auto *TCC = cast<ElidedTemporaryObjectConstructionContext>(Val: CC);
      State = updateObjectsUnderConstruction(
          V, E: TCC->getConstructorAfterElision(), State, LCtx,
          CC: TCC->getConstructionContextAfterElision(), CallOpts);

      // Remember that we've elided the constructor.
      State = addObjectUnderConstruction(
          State, Item: TCC->getConstructorAfterElision(), LC: LCtx, V);

      // Remember that we've elided the destructor.
      if (const auto *BTE = TCC->getCXXBindTemporaryExpr())
        State = elideDestructor(State, BTE, LC: LCtx);

      // Instead of materialization, shamelessly return
      // the final object destination.
      if (const auto *MTE = TCC->getMaterializedTemporaryExpr())
        State = addObjectUnderConstruction(State, Item: MTE, LC: LCtx, V);

      return State;
    }
    // If we decided not to elide the constructor, proceed as if
    // it's a simple temporary.
    [[fallthrough]];
  }
  case ConstructionContext::SimpleTemporaryObjectKind: {
    // Track the temporary through its bind and materialize expressions.
    const auto *TCC = cast<TemporaryObjectConstructionContext>(Val: CC);
    if (const auto *BTE = TCC->getCXXBindTemporaryExpr())
      State = addObjectUnderConstruction(State, Item: BTE, LC: LCtx, V);

    if (const auto *MTE = TCC->getMaterializedTemporaryExpr())
      State = addObjectUnderConstruction(State, Item: MTE, LC: LCtx, V);

    return State;
  }
  case ConstructionContext::LambdaCaptureKind: {
    const auto *LCC = cast<LambdaCaptureConstructionContext>(Val: CC);

    // If we capture and array, we want to store the super region, not a
    // sub-region.
    if (const auto *EL = dyn_cast_or_null<ElementRegion>(Val: V.getAsRegion()))
      V = loc::MemRegionVal(EL->getSuperRegion());

    return addObjectUnderConstruction(
        State, Item: {LCC->getLambdaExpr(), LCC->getIndex()}, LC: LCtx, V);
  }
  case ConstructionContext::ArgumentKind: {
    // Track the argument by its (call expression, argument index) pair.
    const auto *ACC = cast<ArgumentConstructionContext>(Val: CC);
    if (const auto *BTE = ACC->getCXXBindTemporaryExpr())
      State = addObjectUnderConstruction(State, Item: BTE, LC: LCtx, V);

    return addObjectUnderConstruction(
        State, Item: {ACC->getCallLikeExpr(), ACC->getIndex()}, LC: LCtx, V);
  }
  }
  llvm_unreachable("Unhandled construction context!");
}
522
523static ProgramStateRef
524bindRequiredArrayElementToEnvironment(ProgramStateRef State,
525 const ArrayInitLoopExpr *AILE,
526 const LocationContext *LCtx, NonLoc Idx) {
527 SValBuilder &SVB = State->getStateManager().getSValBuilder();
528 MemRegionManager &MRMgr = SVB.getRegionManager();
529 ASTContext &Ctx = SVB.getContext();
530
531 // HACK: There is no way we can put the index of the array element into the
532 // CFG unless we unroll the loop, so we manually select and bind the required
533 // parameter to the environment.
534 const Expr *SourceArray = AILE->getCommonExpr()->getSourceExpr();
535 const auto *Ctor =
536 cast<CXXConstructExpr>(Val: extractElementInitializerFromNestedAILE(AILE));
537
538 const auto *SourceArrayRegion =
539 cast<SubRegion>(Val: State->getSVal(Ex: SourceArray, LCtx).getAsRegion());
540 const ElementRegion *ElementRegion =
541 MRMgr.getElementRegion(elementType: Ctor->getType(), Idx, superRegion: SourceArrayRegion, Ctx);
542
543 return State->BindExpr(S: Ctor->getArg(Arg: 0), LCtx,
544 V: loc::MemRegionVal(ElementRegion));
545}
546
547void ExprEngine::handleConstructor(const Expr *E,
548 ExplodedNode *Pred,
549 ExplodedNodeSet &destNodes) {
550 const auto *CE = dyn_cast<CXXConstructExpr>(Val: E);
551 const auto *CIE = dyn_cast<CXXInheritedCtorInitExpr>(Val: E);
552 assert(CE || CIE);
553
554 const LocationContext *LCtx = Pred->getLocationContext();
555 ProgramStateRef State = Pred->getState();
556
557 SVal Target = UnknownVal();
558
559 if (CE) {
560 if (std::optional<SVal> ElidedTarget =
561 getObjectUnderConstruction(State, Item: CE, LC: LCtx)) {
562 // We've previously modeled an elidable constructor by pretending that
563 // it in fact constructs into the correct target. This constructor can
564 // therefore be skipped.
565 Target = *ElidedTarget;
566 NodeBuilder Bldr(Pred, destNodes, *currBldrCtx);
567 State = finishObjectConstruction(State, Item: CE, LC: LCtx);
568 if (auto L = Target.getAs<Loc>())
569 State = State->BindExpr(S: CE, LCtx, V: State->getSVal(LV: *L, T: CE->getType()));
570 Bldr.generateNode(S: CE, Pred, St: State);
571 return;
572 }
573 }
574
575 EvalCallOptions CallOpts;
576 auto C = getCurrentCFGElement().getAs<CFGConstructor>();
577 assert(C || getCurrentCFGElement().getAs<CFGStmt>());
578 const ConstructionContext *CC = C ? C->getConstructionContext() : nullptr;
579
580 const CXXConstructionKind CK =
581 CE ? CE->getConstructionKind() : CIE->getConstructionKind();
582 switch (CK) {
583 case CXXConstructionKind::Complete: {
584 // Inherited constructors are always base class constructors.
585 assert(CE && !CIE && "A complete constructor is inherited?!");
586
587 // If the ctor is part of an ArrayInitLoopExpr, we want to handle it
588 // differently.
589 auto *AILE = CC ? CC->getArrayInitLoop() : nullptr;
590
591 unsigned Idx = 0;
592 if (CE->getType()->isArrayType() || AILE) {
593
594 auto isZeroSizeArray = [&] {
595 uint64_t Size = 1;
596
597 if (const auto *CAT = dyn_cast<ConstantArrayType>(Val: CE->getType()))
598 Size = getContext().getConstantArrayElementCount(CA: CAT);
599 else if (AILE)
600 Size = getContext().getArrayInitLoopExprElementCount(AILE);
601
602 return Size == 0;
603 };
604
605 // No element construction will happen in a 0 size array.
606 if (isZeroSizeArray()) {
607 NodeBuilder Bldr(Pred, destNodes, *currBldrCtx);
608 static SimpleProgramPointTag T{"ExprEngine",
609 "Skipping 0 size array construction"};
610 Bldr.generateNode(S: CE, Pred, St: State, tag: &T);
611 return;
612 }
613
614 Idx = getIndexOfElementToConstruct(State, E: CE, LCtx).value_or(u: 0u);
615 State = setIndexOfElementToConstruct(State, E: CE, LCtx, Idx: Idx + 1);
616 }
617
618 if (AILE) {
619 // Only set this once even though we loop through it multiple times.
620 if (!getPendingInitLoop(State, E: CE, LCtx))
621 State = setPendingInitLoop(
622 State, E: CE, LCtx,
623 Idx: getContext().getArrayInitLoopExprElementCount(AILE));
624
625 State = bindRequiredArrayElementToEnvironment(
626 State, AILE, LCtx, Idx: svalBuilder.makeArrayIndex(idx: Idx));
627 }
628
629 // The target region is found from construction context.
630 std::tie(args&: State, args&: Target) = handleConstructionContext(
631 E: CE, State, BldrCtx: currBldrCtx, LCtx, CC, CallOpts, Idx);
632 break;
633 }
634 case CXXConstructionKind::VirtualBase: {
635 // Make sure we are not calling virtual base class initializers twice.
636 // Only the most-derived object should initialize virtual base classes.
637 const auto *OuterCtor = dyn_cast_or_null<CXXConstructExpr>(
638 Val: LCtx->getStackFrame()->getCallSite());
639 assert(
640 (!OuterCtor ||
641 OuterCtor->getConstructionKind() == CXXConstructionKind::Complete ||
642 OuterCtor->getConstructionKind() == CXXConstructionKind::Delegating) &&
643 ("This virtual base should have already been initialized by "
644 "the most derived class!"));
645 (void)OuterCtor;
646 [[fallthrough]];
647 }
648 case CXXConstructionKind::NonVirtualBase:
649 // In C++17, classes with non-virtual bases may be aggregates, so they would
650 // be initialized as aggregates without a constructor call, so we may have
651 // a base class constructed directly into an initializer list without
652 // having the derived-class constructor call on the previous stack frame.
653 // Initializer lists may be nested into more initializer lists that
654 // correspond to surrounding aggregate initializations.
655 // FIXME: For now this code essentially bails out. We need to find the
656 // correct target region and set it.
657 // FIXME: Instead of relying on the ParentMap, we should have the
658 // trigger-statement (InitListExpr or CXXParenListInitExpr in this case)
659 // passed down from CFG or otherwise always available during construction.
660 if (isa_and_nonnull<InitListExpr, CXXParenListInitExpr>(
661 Val: LCtx->getParentMap().getParent(S: E))) {
662 MemRegionManager &MRMgr = getSValBuilder().getRegionManager();
663 Target = loc::MemRegionVal(MRMgr.getCXXTempObjectRegion(Ex: E, LC: LCtx));
664 CallOpts.IsCtorOrDtorWithImproperlyModeledTargetRegion = true;
665 break;
666 }
667 [[fallthrough]];
668 case CXXConstructionKind::Delegating: {
669 const CXXMethodDecl *CurCtor = cast<CXXMethodDecl>(Val: LCtx->getDecl());
670 Loc ThisPtr = getSValBuilder().getCXXThis(D: CurCtor,
671 SFC: LCtx->getStackFrame());
672 SVal ThisVal = State->getSVal(LV: ThisPtr);
673
674 if (CK == CXXConstructionKind::Delegating) {
675 Target = ThisVal;
676 } else {
677 // Cast to the base type.
678 bool IsVirtual = (CK == CXXConstructionKind::VirtualBase);
679 SVal BaseVal =
680 getStoreManager().evalDerivedToBase(Derived: ThisVal, DerivedPtrType: E->getType(), IsVirtual);
681 Target = BaseVal;
682 }
683 break;
684 }
685 }
686
687 if (State != Pred->getState()) {
688 static SimpleProgramPointTag T("ExprEngine",
689 "Prepare for object construction");
690 ExplodedNodeSet DstPrepare;
691 NodeBuilder BldrPrepare(Pred, DstPrepare, *currBldrCtx);
692 Pred =
693 BldrPrepare.generateNode(S: E, Pred, St: State, tag: &T, K: ProgramPoint::PreStmtKind);
694 if (!Pred)
695 return;
696 }
697
698 const MemRegion *TargetRegion = Target.getAsRegion();
699 CallEventManager &CEMgr = getStateManager().getCallEventManager();
700 CallEventRef<> Call =
701 CIE ? (CallEventRef<>)CEMgr.getCXXInheritedConstructorCall(
702 E: CIE, Target: TargetRegion, State, LCtx, ElemRef: getCFGElementRef())
703 : (CallEventRef<>)CEMgr.getCXXConstructorCall(
704 E: CE, Target: TargetRegion, State, LCtx, ElemRef: getCFGElementRef());
705
706 ExplodedNodeSet DstPreVisit;
707 getCheckerManager().runCheckersForPreStmt(Dst&: DstPreVisit, Src: Pred, S: E, Eng&: *this);
708
709 ExplodedNodeSet PreInitialized;
710 if (CE) {
711 // FIXME: Is it possible and/or useful to do this before PreStmt?
712 NodeBuilder Bldr(DstPreVisit, PreInitialized, *currBldrCtx);
713 for (ExplodedNode *N : DstPreVisit) {
714 ProgramStateRef State = N->getState();
715 if (CE->requiresZeroInitialization()) {
716 // FIXME: Once we properly handle constructors in new-expressions, we'll
717 // need to invalidate the region before setting a default value, to make
718 // sure there aren't any lingering bindings around. This probably needs
719 // to happen regardless of whether or not the object is zero-initialized
720 // to handle random fields of a placement-initialized object picking up
721 // old bindings. We might only want to do it when we need to, though.
722 // FIXME: This isn't actually correct for arrays -- we need to zero-
723 // initialize the entire array, not just the first element -- but our
724 // handling of arrays everywhere else is weak as well, so this shouldn't
725 // actually make things worse. Placement new makes this tricky as well,
726 // since it's then possible to be initializing one part of a multi-
727 // dimensional array.
728 const CXXRecordDecl *TargetHeldRecord =
729 dyn_cast_or_null<CXXRecordDecl>(Val: CE->getType()->getAsRecordDecl());
730
731 if (!TargetHeldRecord || !TargetHeldRecord->isEmpty())
732 State = State->bindDefaultZero(loc: Target, LCtx);
733 }
734
735 Bldr.generateNode(S: CE, Pred: N, St: State, /*tag=*/nullptr,
736 K: ProgramPoint::PreStmtKind);
737 }
738 } else {
739 PreInitialized = DstPreVisit;
740 }
741
742 ExplodedNodeSet DstPreCall;
743 getCheckerManager().runCheckersForPreCall(Dst&: DstPreCall, Src: PreInitialized,
744 Call: *Call, Eng&: *this);
745
746 ExplodedNodeSet DstEvaluated;
747
748 if (CE && CE->getConstructor()->isTrivial() &&
749 CE->getConstructor()->isCopyOrMoveConstructor() &&
750 !CallOpts.IsArrayCtorOrDtor) {
751 NodeBuilder Bldr(DstPreCall, DstEvaluated, *currBldrCtx);
752 // FIXME: Handle other kinds of trivial constructors as well.
753 for (ExplodedNode *N : DstPreCall)
754 performTrivialCopy(Bldr, Pred: N, Call: *Call);
755
756 } else {
757 for (ExplodedNode *N : DstPreCall)
758 getCheckerManager().runCheckersForEvalCall(Dst&: DstEvaluated, Src: N, CE: *Call, Eng&: *this,
759 CallOpts);
760 }
761
762 // If the CFG was constructed without elements for temporary destructors
763 // and the just-called constructor created a temporary object then
764 // stop exploration if the temporary object has a noreturn constructor.
765 // This can lose coverage because the destructor, if it were present
766 // in the CFG, would be called at the end of the full expression or
767 // later (for life-time extended temporaries) -- but avoids infeasible
768 // paths when no-return temporary destructors are used for assertions.
769 ExplodedNodeSet DstEvaluatedPostProcessed;
770 NodeBuilder Bldr(DstEvaluated, DstEvaluatedPostProcessed, *currBldrCtx);
771 const AnalysisDeclContext *ADC = LCtx->getAnalysisDeclContext();
772 if (!ADC->getCFGBuildOptions().AddTemporaryDtors) {
773 if (llvm::isa_and_nonnull<CXXTempObjectRegion,
774 CXXLifetimeExtendedObjectRegion>(Val: TargetRegion) &&
775 cast<CXXConstructorDecl>(Val: Call->getDecl())
776 ->getParent()
777 ->isAnyDestructorNoReturn()) {
778
779 // If we've inlined the constructor, then DstEvaluated would be empty.
780 // In this case we still want a sink, which could be implemented
781 // in processCallExit. But we don't have that implemented at the moment,
782 // so if you hit this assertion, see if you can avoid inlining
783 // the respective constructor when analyzer-config cfg-temporary-dtors
784 // is set to false.
785 // Otherwise there's nothing wrong with inlining such constructor.
786 assert(!DstEvaluated.empty() &&
787 "We should not have inlined this constructor!");
788
789 for (ExplodedNode *N : DstEvaluated) {
790 Bldr.generateSink(S: E, Pred: N, St: N->getState());
791 }
792
      // There is no need to run the PostCall and PostStmt checker
      // callbacks because we just generated sinks on all nodes in the
      // frontier.
796 return;
797 }
798 }
799
800 ExplodedNodeSet DstPostArgumentCleanup;
801 for (ExplodedNode *I : DstEvaluatedPostProcessed)
802 finishArgumentConstruction(Dst&: DstPostArgumentCleanup, Pred: I, Call: *Call);
803
804 // If there were other constructors called for object-type arguments
805 // of this constructor, clean them up.
806 ExplodedNodeSet DstPostCall;
807 getCheckerManager().runCheckersForPostCall(Dst&: DstPostCall,
808 Src: DstPostArgumentCleanup,
809 Call: *Call, Eng&: *this);
810 getCheckerManager().runCheckersForPostStmt(Dst&: destNodes, Src: DstPostCall, S: E, Eng&: *this);
811}
812
/// Visit a CXXConstructExpr by delegating to the shared constructor-handling
/// logic (also used for CXXInheritedCtorInitExpr).
void ExprEngine::VisitCXXConstructExpr(const CXXConstructExpr *CE,
                                       ExplodedNode *Pred,
                                       ExplodedNodeSet &Dst) {
  handleConstructor(E: CE, Pred, destNodes&: Dst);
}
818
/// Visit a CXXInheritedCtorInitExpr (a constructor inherited via a
/// using-declaration) by delegating to the shared constructor-handling logic.
void ExprEngine::VisitCXXInheritedCtorInitExpr(
    const CXXInheritedCtorInitExpr *CE, ExplodedNode *Pred,
    ExplodedNodeSet &Dst) {
  handleConstructor(E: CE, Pred, destNodes&: Dst);
}
824
/// Model a destructor invocation on the object located at \p Dest.
///
/// \param ObjectType Static type of the object being destroyed; expected to
///        be a C++ class type (its CXXRecordDecl supplies the destructor).
/// \param Dest Region holding the object, or null when the target region
///        could not be modeled (a recovery path below handles that).
/// \param S The statement that triggered the destruction; must be non-null.
/// \param IsBaseDtor True when destroying an implicit base-class subobject
///        rather than the complete object.
/// \param CallOpts In/out evaluation options; a flag is set here when the
///        target region had to be improvised.
void ExprEngine::VisitCXXDestructor(QualType ObjectType,
                                    const MemRegion *Dest,
                                    const Stmt *S,
                                    bool IsBaseDtor,
                                    ExplodedNode *Pred,
                                    ExplodedNodeSet &Dst,
                                    EvalCallOptions &CallOpts) {
  assert(S && "A destructor without a trigger!");
  const LocationContext *LCtx = Pred->getLocationContext();
  ProgramStateRef State = Pred->getState();

  const CXXRecordDecl *RecordDecl = ObjectType->getAsCXXRecordDecl();
  assert(RecordDecl && "Only CXXRecordDecls should have destructors");
  const CXXDestructorDecl *DtorDecl = RecordDecl->getDestructor();
  // FIXME: There should always be a Decl, otherwise the destructor call
  // shouldn't have been added to the CFG in the first place.
  if (!DtorDecl) {
    // Skip the invalid destructor. We cannot simply return because
    // it would interrupt the analysis instead.
    static SimpleProgramPointTag T("ExprEngine", "SkipInvalidDestructor");
    // FIXME: PostImplicitCall with a null decl may crash elsewhere anyway.
    PostImplicitCall PP(/*Decl=*/nullptr, S->getEndLoc(), LCtx,
                        getCFGElementRef(), &T);
    // Generate a tagged pass-through node so the analysis continues past
    // this CFG element instead of silently dropping the path.
    NodeBuilder Bldr(Pred, Dst, *currBldrCtx);
    Bldr.generateNode(PP, State: Pred->getState(), Pred);
    return;
  }

  if (!Dest) {
    // We're trying to destroy something that is not a region. This may happen
    // for a variety of reasons (unknown target region, concrete integer instead
    // of target region, etc.). The current code makes an attempt to recover.
    // FIXME: We probably don't really need to recover when we're dealing
    // with concrete integers specifically.
    CallOpts.IsCtorOrDtorWithImproperlyModeledTargetRegion = true;
    if (const Expr *E = dyn_cast_or_null<Expr>(Val: S)) {
      // Recover by synthesizing a temporary-object region for the trigger
      // expression.
      Dest = MRMgr.getCXXTempObjectRegion(Ex: E, LC: Pred->getLocationContext());
    } else {
      // No expression to hang a region on: give up on this path via a sink.
      static SimpleProgramPointTag T("ExprEngine", "SkipInvalidDestructor");
      NodeBuilder Bldr(Pred, Dst, *currBldrCtx);
      Bldr.generateSink(PP: Pred->getLocation().withTag(tag: &T),
                        State: Pred->getState(), Pred);
      return;
    }
  }

  // Build the destructor CallEvent and evaluate it through the usual
  // pre-call -> eval-call -> post-call checker pipeline.
  CallEventManager &CEMgr = getStateManager().getCallEventManager();
  CallEventRef<CXXDestructorCall> Call = CEMgr.getCXXDestructorCall(
      DD: DtorDecl, Trigger: S, Target: Dest, IsBase: IsBaseDtor, State, LCtx, ElemRef: getCFGElementRef());

  PrettyStackTraceLoc CrashInfo(getContext().getSourceManager(),
                                Call->getSourceRange().getBegin(),
                                "Error evaluating destructor");

  ExplodedNodeSet DstPreCall;
  getCheckerManager().runCheckersForPreCall(Dst&: DstPreCall, Src: Pred,
                                            Call: *Call, Eng&: *this);

  ExplodedNodeSet DstInvalidated;
  NodeBuilder Bldr(DstPreCall, DstInvalidated, *currBldrCtx);
  for (ExplodedNode *N : DstPreCall)
    defaultEvalCall(B&: Bldr, Pred: N, Call: *Call, CallOpts);

  getCheckerManager().runCheckersForPostCall(Dst, Src: DstInvalidated,
                                             Call: *Call, Eng&: *this);
}
891
/// Model the call to 'operator new' (the allocator) that precedes the
/// initialization of a new-expression. The conjured return value is stashed
/// as an "object under construction" so that VisitCXXNewExpr can pick it up
/// later when processing the CXXNewExpr itself.
void ExprEngine::VisitCXXNewAllocatorCall(const CXXNewExpr *CNE,
                                          ExplodedNode *Pred,
                                          ExplodedNodeSet &Dst) {
  ProgramStateRef State = Pred->getState();
  const LocationContext *LCtx = Pred->getLocationContext();
  PrettyStackTraceLoc CrashInfo(getContext().getSourceManager(),
                                CNE->getBeginLoc(),
                                "Error evaluating New Allocator Call");
  CallEventManager &CEMgr = getStateManager().getCallEventManager();
  CallEventRef<CXXAllocatorCall> Call =
      CEMgr.getCXXAllocatorCall(E: CNE, State, LCtx, ElemRef: getCFGElementRef());

  ExplodedNodeSet DstPreCall;
  getCheckerManager().runCheckersForPreCall(Dst&: DstPreCall, Src: Pred,
                                            Call: *Call, Eng&: *this);

  ExplodedNodeSet DstPostCall;
  NodeBuilder CallBldr(DstPreCall, DstPostCall, *currBldrCtx);
  for (ExplodedNode *I : DstPreCall) {
    // Operator new calls (CXXNewExpr) are intentionally not eval-called,
    // because it does not make sense to eval-call user-provided functions.
    // 1) If the new operator can be inlined, then don't prevent it from
    // inlining by having an eval-call of that operator.
    // 2) If it can't be inlined, then the default conservative modeling
    // is what we want anyway.
    // So the best is to not allow eval-calling CXXNewExprs from checkers.
    // Checkers can provide their pre/post-call callbacks if needed.
    defaultEvalCall(B&: CallBldr, Pred: I, Call: *Call);
  }
  // If the call is inlined, DstPostCall will be empty and we bail out now.

  // Store return value of operator new() for future use, until the actual
  // CXXNewExpr gets processed.
  ExplodedNodeSet DstPostValue;
  NodeBuilder ValueBldr(DstPostCall, DstPostValue, *currBldrCtx);
  for (ExplodedNode *I : DstPostCall) {
    // FIXME: Because CNE serves as the "call site" for the allocator (due to
    // lack of a better expression in the AST), the conjured return value symbol
    // is going to be of the same type (C++ object pointer type). Technically
    // this is not correct because the operator new's prototype always says that
    // it returns a 'void *'. So we should change the type of the symbol,
    // and then evaluate the cast over the symbolic pointer from 'void *' to
    // the object pointer type. But without changing the symbol's type it
    // is breaking too much to evaluate the no-op symbolic cast over it, so we
    // skip it for now.
    ProgramStateRef State = I->getState();
    SVal RetVal = State->getSVal(Ex: CNE, LCtx);
    // [basic.stc.dynamic.allocation] (on the return value of an allocation
    // function):
    // "The order, contiguity, and initial value of storage allocated by
    // successive calls to an allocation function are unspecified."
    State = State->bindDefaultInitial(loc: RetVal, V: UndefinedVal{}, LCtx);

    // If this allocation function is not declared as non-throwing, failures
    // /must/ be signalled by exceptions, and thus the return value will never
    // be NULL. -fno-exceptions does not influence this semantics.
    // FIXME: GCC has a -fcheck-new option, which forces it to consider the case
    // where new can return NULL. If we end up supporting that option, we can
    // consider adding a check for it here.
    // C++11 [basic.stc.dynamic.allocation]p3.
    if (const FunctionDecl *FD = CNE->getOperatorNew()) {
      QualType Ty = FD->getType();
      if (const auto *ProtoType = Ty->getAs<FunctionProtoType>())
        if (!ProtoType->isNothrow())
          State = State->assume(Cond: RetVal.castAs<DefinedOrUnknownSVal>(), Assumption: true);
    }

    // Record the conjured return value so VisitCXXNewExpr can retrieve it.
    ValueBldr.generateNode(
        S: CNE, Pred: I, St: addObjectUnderConstruction(State, Item: CNE, LC: LCtx, V: RetVal));
  }

  ExplodedNodeSet DstPostPostCallCallback;
  getCheckerManager().runCheckersForPostCall(Dst&: DstPostPostCallCallback,
                                             Src: DstPostValue, Call: *Call, Eng&: *this);
  for (ExplodedNode *I : DstPostPostCallCallback) {
    getCheckerManager().runCheckersForNewAllocator(Call: *Call, Dst, Pred: I, Eng&: *this);
  }
}
970
/// Model the CXXNewExpr itself: retrieve (or conjure) the pointer returned by
/// the allocator, handle array and placement-new cases, and bind the result
/// to the expression.
void ExprEngine::VisitCXXNewExpr(const CXXNewExpr *CNE, ExplodedNode *Pred,
                                 ExplodedNodeSet &Dst) {
  // FIXME: Much of this should eventually migrate to CXXAllocatorCall.
  // Also, we need to decide how allocators actually work -- they're not
  // really part of the CXXNewExpr because they happen BEFORE the
  // CXXConstructExpr subexpression. See PR12014 for some discussion.

  unsigned blockCount = currBldrCtx->blockCount();
  const LocationContext *LCtx = Pred->getLocationContext();
  SVal symVal = UnknownVal();
  FunctionDecl *FD = CNE->getOperatorNew();

  bool IsStandardGlobalOpNewFunction =
      FD->isReplaceableGlobalAllocationFunction();

  ProgramStateRef State = Pred->getState();

  // Retrieve the stored operator new() return value.
  if (AMgr.getAnalyzerOptions().MayInlineCXXAllocator) {
    // NOTE(review): unchecked optional dereference -- presumably
    // VisitCXXNewAllocatorCall always stashed a value when this option is on;
    // confirm that invariant holds on all paths.
    symVal = *getObjectUnderConstruction(State, Item: CNE, LC: LCtx);
    State = finishObjectConstruction(State, Item: CNE, LC: LCtx);
  }

  // We assume all standard global 'operator new' functions allocate memory in
  // heap. We realize this is an approximation that might not correctly model
  // a custom global allocator.
  if (symVal.isUnknown()) {
    if (IsStandardGlobalOpNewFunction)
      symVal = svalBuilder.getConjuredHeapSymbolVal(elem: getCFGElementRef(), LCtx,
                                                    type: CNE->getType(), Count: blockCount);
    else
      symVal = svalBuilder.conjureSymbolVal(
          /*symbolTag=*/nullptr, elem: getCFGElementRef(), LCtx, count: blockCount);
  }

  CallEventManager &CEMgr = getStateManager().getCallEventManager();
  CallEventRef<CXXAllocatorCall> Call =
      CEMgr.getCXXAllocatorCall(E: CNE, State, LCtx, ElemRef: getCFGElementRef());

  if (!AMgr.getAnalyzerOptions().MayInlineCXXAllocator) {
    // Invalidate placement args.
    // FIXME: Once we figure out how we want allocators to work,
    // we should be using the usual pre-/(default-)eval-/post-call checkers
    // here.
    State = Call->invalidateRegions(BlockCount: blockCount, State);
    if (!State)
      return;

    // If this allocation function is not declared as non-throwing, failures
    // /must/ be signalled by exceptions, and thus the return value will never
    // be NULL. -fno-exceptions does not influence this semantics.
    // FIXME: GCC has a -fcheck-new option, which forces it to consider the case
    // where new can return NULL. If we end up supporting that option, we can
    // consider adding a check for it here.
    // C++11 [basic.stc.dynamic.allocation]p3.
    if (const auto *ProtoType = FD->getType()->getAs<FunctionProtoType>())
      if (!ProtoType->isNothrow())
        if (auto dSymVal = symVal.getAs<DefinedOrUnknownSVal>())
          State = State->assume(Cond: *dSymVal, Assumption: true);
  }

  NodeBuilder Bldr(Pred, Dst, *currBldrCtx);

  SVal Result = symVal;

  if (CNE->isArray()) {

    if (const auto *NewReg = cast_or_null<SubRegion>(Val: symVal.getAsRegion())) {
      // If each element is initialized by their default constructor, the field
      // values are properly placed inside the required region, however if an
      // initializer list is used, this doesn't happen automatically.
      auto *Init = CNE->getInitializer();
      bool isInitList =
          isa_and_nonnull<InitListExpr, CXXParenListInitExpr>(Val: Init);

      QualType ObjTy =
          isInitList ? Init->getType() : CNE->getType()->getPointeeType();
      // An array new-expression yields a pointer to the first element.
      const ElementRegion *EleReg =
          MRMgr.getElementRegion(elementType: ObjTy, Idx: svalBuilder.makeArrayIndex(idx: 0), superRegion: NewReg,
                                 Ctx: svalBuilder.getContext());
      Result = loc::MemRegionVal(EleReg);

      // If the array is list initialized, we bind the initializer list to the
      // memory region here, otherwise we would lose it.
      if (isInitList) {
        Bldr.takeNodes(N: Pred);
        Pred = Bldr.generateNode(S: CNE, Pred, St: State);

        SVal V = State->getSVal(Ex: Init, LCtx);
        ExplodedNodeSet evaluated;
        evalBind(Dst&: evaluated, StoreE: CNE, Pred, location: Result, Val: V, AtDeclInit: true);

        Bldr.takeNodes(N: Pred);
        Bldr.addNodes(S: evaluated);

        // NOTE(review): assumes evalBind produced at least one node here;
        // confirm it cannot return an empty set on this path.
        Pred = *evaluated.begin();
        State = Pred->getState();
      }
    }

    State = State->BindExpr(S: CNE, LCtx: Pred->getLocationContext(), V: Result);
    Bldr.generateNode(S: CNE, Pred, St: State);
    return;
  }

  // FIXME: Once we have proper support for CXXConstructExprs inside
  // CXXNewExpr, we need to make sure that the constructed object is not
  // immediately invalidated here. (The placement call should happen before
  // the constructor call anyway.)
  if (FD->isReservedGlobalPlacementOperator()) {
    // Non-array placement new should always return the placement location.
    SVal PlacementLoc = State->getSVal(Ex: CNE->getPlacementArg(I: 0), LCtx);
    Result = svalBuilder.evalCast(V: PlacementLoc, CastTy: CNE->getType(),
                                  OriginalTy: CNE->getPlacementArg(I: 0)->getType());
  }

  // Bind the address of the object, then check to see if we cached out.
  State = State->BindExpr(S: CNE, LCtx, V: Result);
  ExplodedNode *NewN = Bldr.generateNode(S: CNE, Pred, St: State);
  if (!NewN)
    return;

  // If the type is not a record, we won't have a CXXConstructExpr as an
  // initializer. Copy the value over.
  if (const Expr *Init = CNE->getInitializer()) {
    if (!isa<CXXConstructExpr>(Val: Init)) {
      assert(Bldr.getResults().size() == 1);
      Bldr.takeNodes(N: NewN);
      evalBind(Dst, StoreE: CNE, Pred: NewN, location: Result, Val: State->getSVal(Ex: Init, LCtx),
               /*FirstInit=*/AtDeclInit: IsStandardGlobalOpNewFunction);
    }
  }
}
1104
1105void ExprEngine::VisitCXXDeleteExpr(const CXXDeleteExpr *CDE,
1106 ExplodedNode *Pred, ExplodedNodeSet &Dst) {
1107
1108 CallEventManager &CEMgr = getStateManager().getCallEventManager();
1109 CallEventRef<CXXDeallocatorCall> Call = CEMgr.getCXXDeallocatorCall(
1110 E: CDE, State: Pred->getState(), LCtx: Pred->getLocationContext(), ElemRef: getCFGElementRef());
1111
1112 ExplodedNodeSet DstPreCall;
1113 getCheckerManager().runCheckersForPreCall(Dst&: DstPreCall, Src: Pred, Call: *Call, Eng&: *this);
1114 ExplodedNodeSet DstPostCall;
1115
1116 if (AMgr.getAnalyzerOptions().MayInlineCXXAllocator) {
1117 NodeBuilder Bldr(DstPreCall, DstPostCall, *currBldrCtx);
1118 for (ExplodedNode *I : DstPreCall) {
1119 // Intentionally either inline or conservative eval-call the operator
1120 // delete, but avoid triggering an eval-call event for checkers.
1121 // As detailed at handling CXXNewExprs, in short, because it does not
1122 // really make sense to eval-call user-provided functions.
1123 defaultEvalCall(B&: Bldr, Pred: I, Call: *Call);
1124 }
1125 } else {
1126 DstPostCall = DstPreCall;
1127 }
1128 getCheckerManager().runCheckersForPostCall(Dst, Src: DstPostCall, Call: *Call, Eng&: *this);
1129}
1130
1131void ExprEngine::VisitCXXCatchStmt(const CXXCatchStmt *CS, ExplodedNode *Pred,
1132 ExplodedNodeSet &Dst) {
1133 const VarDecl *VD = CS->getExceptionDecl();
1134 if (!VD) {
1135 Dst.Add(N: Pred);
1136 return;
1137 }
1138
1139 const LocationContext *LCtx = Pred->getLocationContext();
1140 SVal V = svalBuilder.conjureSymbolVal(elem: getCFGElementRef(), LCtx, type: VD->getType(),
1141 visitCount: currBldrCtx->blockCount());
1142 ProgramStateRef state = Pred->getState();
1143 state = state->bindLoc(location: state->getLValue(VD, LC: LCtx), V, LCtx);
1144
1145 NodeBuilder Bldr(Pred, Dst, *currBldrCtx);
1146 Bldr.generateNode(S: CS, Pred, St: state);
1147}
1148
1149void ExprEngine::VisitCXXThisExpr(const CXXThisExpr *TE, ExplodedNode *Pred,
1150 ExplodedNodeSet &Dst) {
1151 NodeBuilder Bldr(Pred, Dst, *currBldrCtx);
1152
1153 // Get the this object region from StoreManager.
1154 const LocationContext *LCtx = Pred->getLocationContext();
1155 const MemRegion *R =
1156 svalBuilder.getRegionManager().getCXXThisRegion(
1157 thisPointerTy: getContext().getCanonicalType(T: TE->getType()),
1158 LC: LCtx);
1159
1160 ProgramStateRef state = Pred->getState();
1161 SVal V = state->getSVal(LV: loc::MemRegionVal(R));
1162 Bldr.generateNode(S: TE, Pred, St: state->BindExpr(S: TE, LCtx, V));
1163}
1164
/// Model a lambda expression: create a temporary-object region for the
/// closure, explicitly bind every capture into the corresponding field of the
/// lambda's class, and bind the resulting rvalue to the expression.
void ExprEngine::VisitLambdaExpr(const LambdaExpr *LE, ExplodedNode *Pred,
                                 ExplodedNodeSet &Dst) {
  const LocationContext *LocCtxt = Pred->getLocationContext();

  // Get the region of the lambda itself.
  const MemRegion *R = svalBuilder.getRegionManager().getCXXTempObjectRegion(
      Ex: LE, LC: LocCtxt);
  SVal V = loc::MemRegionVal(R);

  ProgramStateRef State = Pred->getState();

  // If we created a new MemRegion for the lambda, we should explicitly bind
  // the captures.
  // The seq<unsigned>(0, -1) range is effectively unbounded; zip stops at the
  // shorter of the field and capture-initializer ranges, so Idx simply
  // enumerates the captures.
  for (auto const [Idx, FieldForCapture, InitExpr] :
       llvm::zip(t: llvm::seq<unsigned>(Begin: 0, End: -1), u: LE->getLambdaClass()->fields(),
                 args: LE->capture_inits())) {
    SVal FieldLoc = State->getLValue(decl: FieldForCapture, Base: V);

    SVal InitVal;
    if (!FieldForCapture->hasCapturedVLAType()) {
      assert(InitExpr && "Capture missing initialization expression");

      // Capturing a 0 length array is a no-op, so we ignore it to get a more
      // accurate analysis. If it's not ignored, it would set the default
      // binding of the lambda to 'Unknown', which can lead to falsely detecting
      // 'Uninitialized' values as 'Unknown' and not reporting a warning.
      const auto FTy = FieldForCapture->getType();
      if (FTy->isConstantArrayType() &&
          getContext().getConstantArrayElementCount(
              CA: getContext().getAsConstantArrayType(T: FTy)) == 0)
        continue;

      // With C++17 copy elision the InitExpr can be anything, so instead of
      // pattern matching all cases, we simple check if the current field is
      // under construction or not, regardless what it's InitExpr is.
      if (const auto OUC =
              getObjectUnderConstruction(State, Item: {LE, Idx}, LC: LocCtxt)) {
        // The capture was constructed in place; read it back from its region
        // and mark the construction finished.
        InitVal = State->getSVal(R: OUC->getAsRegion());

        State = finishObjectConstruction(State, Item: {LE, Idx}, LC: LocCtxt);
      } else
        InitVal = State->getSVal(Ex: InitExpr, LCtx: LocCtxt);

    } else {

      assert(!getObjectUnderConstruction(State, {LE, Idx}, LocCtxt) &&
             "VLA capture by value is a compile time error!");

      // The field stores the length of a captured variable-length array.
      // These captures don't have initialization expressions; instead we
      // get the length from the VLAType size expression.
      Expr *SizeExpr = FieldForCapture->getCapturedVLAType()->getSizeExpr();
      InitVal = State->getSVal(Ex: SizeExpr, LCtx: LocCtxt);
    }

    State = State->bindLoc(LV: FieldLoc, V: InitVal, LCtx: LocCtxt);
  }

  // Decay the Loc into an RValue, because there might be a
  // MaterializeTemporaryExpr node above this one which expects the bound value
  // to be an RValue.
  SVal LambdaRVal = State->getSVal(R);

  ExplodedNodeSet Tmp;
  NodeBuilder Bldr(Pred, Tmp, *currBldrCtx);
  // FIXME: is this the right program point kind?
  Bldr.generateNode(S: LE, Pred,
                    St: State->BindExpr(S: LE, LCtx: LocCtxt, V: LambdaRVal),
                    tag: nullptr, K: ProgramPoint::PostLValueKind);

  // FIXME: Move all post/pre visits to ::Visit().
  getCheckerManager().runCheckersForPostStmt(Dst, Src: Tmp, S: LE, Eng&: *this);
}
1238
1239void ExprEngine::VisitAttributedStmt(const AttributedStmt *A,
1240 ExplodedNode *Pred, ExplodedNodeSet &Dst) {
1241 ExplodedNodeSet CheckerPreStmt;
1242 getCheckerManager().runCheckersForPreStmt(Dst&: CheckerPreStmt, Src: Pred, S: A, Eng&: *this);
1243
1244 ExplodedNodeSet EvalSet;
1245 NodeBuilder Bldr(CheckerPreStmt, EvalSet, *currBldrCtx);
1246
1247 for (const auto *Attr : getSpecificAttrs<CXXAssumeAttr>(container: A->getAttrs())) {
1248 for (ExplodedNode *N : CheckerPreStmt) {
1249 Visit(S: Attr->getAssumption()->IgnoreParens(), Pred: N, Dst&: EvalSet);
1250 }
1251 }
1252
1253 getCheckerManager().runCheckersForPostStmt(Dst, Src: EvalSet, S: A, Eng&: *this);
1254}
1255