//===------- Interp.cpp - Interpreter for the constexpr VM ------*- C++ -*-===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//

#include "Interp.h"
#include "Compiler.h"
#include "Function.h"
#include "InterpFrame.h"
#include "InterpShared.h"
#include "InterpStack.h"
#include "Opcode.h"
#include "PrimType.h"
#include "Program.h"
#include "State.h"
#include "clang/AST/ASTContext.h"
#include "clang/AST/CXXInheritance.h"
#include "clang/AST/DeclObjC.h"
#include "clang/AST/Expr.h"
#include "clang/AST/ExprCXX.h"
#include "clang/Basic/DiagnosticSema.h"
#include "clang/Basic/TargetInfo.h"
#include "llvm/ADT/StringExtras.h"

using namespace clang;
using namespace clang::interp;

static bool RetValue(InterpState &S, CodePtr &Pt) {
  llvm::report_fatal_error("Interpreter cannot return values");
}

//===----------------------------------------------------------------------===//
// Jmp, Jt, Jf
//===----------------------------------------------------------------------===//

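// Unconditional and conditional jumps. Jt and Jf pop a bool condition off the
// stack; all three helpers report success and express control flow purely by
// adjusting PC.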
static bool Jmp(InterpState &S, CodePtr &PC, int32_t Offset) {
  PC += Offset;
  return true;
}

static bool Jt(InterpState &S, CodePtr &PC, int32_t Offset) {
  if (S.Stk.pop<bool>()) {
    PC += Offset;
  }
  return true;
}

static bool Jf(InterpState &S, CodePtr &PC, int32_t Offset) {
  if (!S.Stk.pop<bool>()) {
    PC += Offset;
  }
  return true;
}

// https://github.com/llvm/llvm-project/issues/102513
#if defined(_MSC_VER) && !defined(__clang__) && !defined(NDEBUG)
#pragma optimize("", off)
#endif
// FIXME: We have the large switch over all opcodes here again, and in
// Interpret().
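// BCP implements the speculative evaluation behind __builtin_constant_p: the
// operand is interpreted up to the matching OP_EndSpeculation marker, and a
// 32-bit integral 1 or 0 is pushed depending on whether that succeeded. This
// is why e.g. __builtin_constant_p(1 / 0) should fold to 0 instead of
// producing a hard evaluation failure.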
static bool BCP(InterpState &S, CodePtr &RealPC, int32_t Offset, PrimType PT) {
  [[maybe_unused]] CodePtr PCBefore = RealPC;
  size_t StackSizeBefore = S.Stk.size();

  auto SpeculativeInterp = [&S, RealPC]() -> bool {
    const InterpFrame *StartFrame = S.Current;
    CodePtr PC = RealPC;

    for (;;) {
      auto Op = PC.read<Opcode>();
      if (Op == OP_EndSpeculation)
        return true;
      CodePtr OpPC = PC;

      switch (Op) {
#define GET_INTERP
#include "Opcodes.inc"
#undef GET_INTERP
      }
    }
    llvm_unreachable("We didn't see an EndSpeculation op?");
  };

  if (SpeculativeInterp()) {
    if (PT == PT_Ptr) {
      const auto &Ptr = S.Stk.pop<Pointer>();
      assert(S.Stk.size() == StackSizeBefore);
      S.Stk.push<Integral<32, true>>(
          Integral<32, true>::from(CheckBCPResult(S, Ptr)));
    } else {
      // Pop the result from the stack and return success.
      TYPE_SWITCH(PT, S.Stk.pop<T>(););
      assert(S.Stk.size() == StackSizeBefore);
      S.Stk.push<Integral<32, true>>(Integral<32, true>::from(1));
    }
  } else {
    if (!S.inConstantContext())
      return Invalid(S, RealPC);

    S.Stk.clearTo(StackSizeBefore);
    S.Stk.push<Integral<32, true>>(Integral<32, true>::from(0));
  }

  // RealPC should not have been modified.
  assert(*RealPC == *PCBefore);

  // Jump to the end label. This is a little trickier than just
  // RealPC += Offset because our usual jump instructions don't have any
  // arguments, so the offset we get is a little too large and we need to
  // subtract the size of the PrimType argument again.
  int32_t ParamSize = align(sizeof(PrimType));
  assert(Offset >= ParamSize);
  RealPC += Offset - ParamSize;

  [[maybe_unused]] CodePtr PCCopy = RealPC;
  assert(PCCopy.read<Opcode>() == OP_EndSpeculation);

  return true;
}
// https://github.com/llvm/llvm-project/issues/102513
#if defined(_MSC_VER) && !defined(__clang__) && !defined(NDEBUG)
#pragma optimize("", on)
#endif

static void diagnoseMissingInitializer(InterpState &S, CodePtr OpPC,
                                       const ValueDecl *VD) {
  const SourceInfo &E = S.Current->getSource(OpPC);
  S.FFDiag(E, diag::note_constexpr_var_init_unknown, 1) << VD;
  S.Note(VD->getLocation(), diag::note_declared_at) << VD->getSourceRange();
}

static void diagnoseNonConstVariable(InterpState &S, CodePtr OpPC,
                                     const ValueDecl *VD);
static bool diagnoseUnknownDecl(InterpState &S, CodePtr OpPC,
                                const ValueDecl *D) {
  // This function tries pretty hard to produce a good diagnostic. Just skip
  // that if nobody will see it anyway.
  if (!S.diagnosing())
    return false;

  if (isa<ParmVarDecl>(D)) {
    if (D->getType()->isReferenceType())
      return false;

    const SourceInfo &Loc = S.Current->getSource(OpPC);
    if (S.getLangOpts().CPlusPlus11) {
      S.FFDiag(Loc, diag::note_constexpr_function_param_value_unknown) << D;
      S.Note(D->getLocation(), diag::note_declared_at) << D->getSourceRange();
    } else {
      S.FFDiag(Loc);
    }
    return false;
  }

  if (!D->getType().isConstQualified()) {
    diagnoseNonConstVariable(S, OpPC, D);
  } else if (const auto *VD = dyn_cast<VarDecl>(D)) {
    if (!VD->getAnyInitializer()) {
      diagnoseMissingInitializer(S, OpPC, VD);
    } else {
      const SourceInfo &Loc = S.Current->getSource(OpPC);
      S.FFDiag(Loc, diag::note_constexpr_var_init_non_constant, 1) << VD;
      S.Note(VD->getLocation(), diag::note_declared_at);
    }
  }

  return false;
}

static void diagnoseNonConstVariable(InterpState &S, CodePtr OpPC,
                                     const ValueDecl *VD) {
  if (!S.diagnosing())
    return;

  const SourceInfo &Loc = S.Current->getSource(OpPC);
  if (!S.getLangOpts().CPlusPlus) {
    S.FFDiag(Loc);
    return;
  }

  if (const auto *VarD = dyn_cast<VarDecl>(VD);
      VarD && VarD->getType().isConstQualified() &&
      !VarD->getAnyInitializer()) {
    diagnoseMissingInitializer(S, OpPC, VD);
    return;
  }

  // Rather random, but this is to match the diagnostic output of the current
  // interpreter.
  if (isa<ObjCIvarDecl>(VD))
    return;

  if (VD->getType()->isIntegralOrEnumerationType()) {
    S.FFDiag(Loc, diag::note_constexpr_ltor_non_const_int, 1) << VD;
    S.Note(VD->getLocation(), diag::note_declared_at);
    return;
  }

  S.FFDiag(Loc,
           S.getLangOpts().CPlusPlus11 ? diag::note_constexpr_ltor_non_constexpr
                                       : diag::note_constexpr_ltor_non_integral,
           1)
      << VD << VD->getType();
  S.Note(VD->getLocation(), diag::note_declared_at);
}

static bool CheckTemporary(InterpState &S, CodePtr OpPC, const Pointer &Ptr,
                           AccessKinds AK) {
  if (auto ID = Ptr.getDeclID()) {
    if (!Ptr.isStaticTemporary())
      return true;

    const auto *MTE = dyn_cast_if_present<MaterializeTemporaryExpr>(
        Ptr.getDeclDesc()->asExpr());
    if (!MTE)
      return true;

    // FIXME(perf): Since we do this check on every Load from a static
    // temporary, it might make sense to cache the value of the
    // isUsableInConstantExpressions call.
    if (!MTE->isUsableInConstantExpressions(S.getASTContext()) &&
        Ptr.block()->getEvalID() != S.Ctx.getEvalID()) {
      const SourceInfo &E = S.Current->getSource(OpPC);
      S.FFDiag(E, diag::note_constexpr_access_static_temporary, 1) << AK;
      S.Note(Ptr.getDeclLoc(), diag::note_constexpr_temporary_here);
      return false;
    }
  }
  return true;
}

static bool CheckGlobal(InterpState &S, CodePtr OpPC, const Pointer &Ptr) {
  if (auto ID = Ptr.getDeclID()) {
    if (!Ptr.isStatic())
      return true;

    if (S.P.getCurrentDecl() == ID)
      return true;

    S.FFDiag(S.Current->getLocation(OpPC), diag::note_constexpr_modify_global);
    return false;
  }
  return true;
}

namespace clang {
namespace interp {
static void popArg(InterpState &S, const Expr *Arg) {
  PrimType Ty = S.getContext().classify(Arg).value_or(PT_Ptr);
  TYPE_SWITCH(Ty, S.Stk.discard<T>());
}

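/// Removes the arguments of the call described by Func from the stack after
/// the call has completed, including any variadic arguments pushed for a
/// variadic callee.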
void cleanupAfterFunctionCall(InterpState &S, CodePtr OpPC,
                              const Function *Func) {
  assert(S.Current);
  assert(Func);

  if (S.Current->Caller && Func->isVariadic()) {
    // The CallExpr we're looking for is at the return PC of the current
    // function, i.e. in the caller.
    // This code path should be executed very rarely.
    unsigned NumVarArgs;
    const Expr *const *Args = nullptr;
    unsigned NumArgs = 0;
    const Expr *CallSite = S.Current->Caller->getExpr(S.Current->getRetPC());
    if (const auto *CE = dyn_cast<CallExpr>(CallSite)) {
      Args = CE->getArgs();
      NumArgs = CE->getNumArgs();
    } else if (const auto *CE = dyn_cast<CXXConstructExpr>(CallSite)) {
      Args = CE->getArgs();
      NumArgs = CE->getNumArgs();
    } else
      assert(false && "Can't get arguments from that expression type");

    assert(NumArgs >= Func->getNumWrittenParams());
    NumVarArgs = NumArgs - (Func->getNumWrittenParams() +
                            isa<CXXOperatorCallExpr>(CallSite));
    for (unsigned I = 0; I != NumVarArgs; ++I) {
      const Expr *A = Args[NumArgs - 1 - I];
      popArg(S, A);
    }
  }

  // And in any case, remove the fixed parameters (the non-variadic ones)
  // at the end.
  for (PrimType Ty : Func->args_reverse())
    TYPE_SWITCH(Ty, S.Stk.discard<T>());
}

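/// Returns whether P points to an object whose value is unknown during
/// constant evaluation, i.e. a "constexpr-unknown" object in the sense of
/// C++23 [expr.const].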
bool isConstexprUnknown(const Pointer &P) {
  if (!P.isBlockPointer())
    return false;

  if (P.isDummy())
    return isa_and_nonnull<ParmVarDecl>(P.getDeclDesc()->asValueDecl());

  return P.getDeclDesc()->IsConstexprUnknown;
}

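/// Decides whether a pointer result of a __builtin_constant_p operand counts
/// as "constant" for the purposes of that builtin.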
bool CheckBCPResult(InterpState &S, const Pointer &Ptr) {
  if (Ptr.isDummy())
    return false;
  if (Ptr.isZero())
    return true;
  if (Ptr.isFunctionPointer())
    return false;
  if (Ptr.isIntegralPointer())
    return true;
  if (Ptr.isTypeidPointer())
    return true;

  if (Ptr.getType()->isAnyComplexType())
    return true;

  if (const Expr *Base = Ptr.getDeclDesc()->asExpr())
    return isa<StringLiteral>(Base) && Ptr.getIndex() == 0;
  return false;
}

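/// Checks that Ptr designates the active member of the union(s) it is nested
/// in, diagnosing reads of inactive union members.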
bool CheckActive(InterpState &S, CodePtr OpPC, const Pointer &Ptr,
                 AccessKinds AK) {
  if (Ptr.isActive())
    return true;

  assert(Ptr.inUnion());
  assert(Ptr.isField() && Ptr.getField());

  Pointer U = Ptr.getBase();
  Pointer C = Ptr;
  while (!U.isRoot() && !U.isActive()) {
    // A little arbitrary, but this is what the current interpreter does.
    // See the AnonymousUnion test in test/AST/ByteCode/unions.cpp.
    // GCC's output is more similar to what we would get without
    // this condition.
    if (U.getRecord() && U.getRecord()->isAnonymousUnion())
      break;

    C = U;
    U = U.getBase();
  }
  assert(C.isField());

  // Consider:
  //   union U {
  //     struct {
  //       int x;
  //       int y;
  //     } a;
  //   }
  //
  // When activating x, we will also activate a. If we now try to read
  // from y, we will get to CheckActive, because y is not active. In that
  // case, our U will be a (not a union). We return here and let later code
  // handle this.
  if (!U.getFieldDesc()->isUnion())
    return true;

  // Get the inactive field descriptor.
  assert(!C.isActive());
  const FieldDecl *InactiveField = C.getField();
  assert(InactiveField);

  // Find the active field of the union.
  const Record *R = U.getRecord();
  assert(R && R->isUnion() && "Not a union");

  const FieldDecl *ActiveField = nullptr;
  for (const Record::Field &F : R->fields()) {
    const Pointer &Field = U.atField(F.Offset);
    if (Field.isActive()) {
      ActiveField = Field.getField();
      break;
    }
  }

  const SourceInfo &Loc = S.Current->getSource(OpPC);
  S.FFDiag(Loc, diag::note_constexpr_access_inactive_union_member)
      << AK << InactiveField << !ActiveField << ActiveField;
  return false;
}

bool CheckExtern(InterpState &S, CodePtr OpPC, const Pointer &Ptr) {
  if (!Ptr.isExtern())
    return true;

  if (Ptr.isInitialized() ||
      (Ptr.getDeclDesc()->asVarDecl() == S.EvaluatingDecl))
    return true;

  if (S.checkingPotentialConstantExpression() && S.getLangOpts().CPlusPlus &&
      Ptr.isConst())
    return false;

  const auto *VD = Ptr.getDeclDesc()->asValueDecl();
  diagnoseNonConstVariable(S, OpPC, VD);
  return false;
}

bool CheckArray(InterpState &S, CodePtr OpPC, const Pointer &Ptr) {
  if (!Ptr.isUnknownSizeArray())
    return true;
  const SourceInfo &E = S.Current->getSource(OpPC);
  S.FFDiag(E, diag::note_constexpr_unsized_array_indexed);
  return false;
}

bool CheckLive(InterpState &S, CodePtr OpPC, const Pointer &Ptr,
               AccessKinds AK) {
  if (Ptr.isZero()) {
    const auto &Src = S.Current->getSource(OpPC);

    if (Ptr.isField())
      S.FFDiag(Src, diag::note_constexpr_null_subobject) << CSK_Field;
    else
      S.FFDiag(Src, diag::note_constexpr_access_null) << AK;

    return false;
  }

  if (!Ptr.isLive()) {
    const auto &Src = S.Current->getSource(OpPC);

    if (Ptr.isDynamic()) {
      S.FFDiag(Src, diag::note_constexpr_access_deleted_object) << AK;
    } else if (!S.checkingPotentialConstantExpression()) {
      bool IsTemp = Ptr.isTemporary();
      S.FFDiag(Src, diag::note_constexpr_lifetime_ended, 1) << AK << !IsTemp;

      if (IsTemp)
        S.Note(Ptr.getDeclLoc(), diag::note_constexpr_temporary_here);
      else
        S.Note(Ptr.getDeclLoc(), diag::note_declared_at);
    }

    return false;
  }

  return true;
}

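/// Checks if the variable described by Desc may be read in a constant
/// expression, diagnosing e.g. reads of non-constexpr globals.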
bool CheckConstant(InterpState &S, CodePtr OpPC, const Descriptor *Desc) {
  assert(Desc);

  const auto *D = Desc->asVarDecl();
  if (!D || !D->hasGlobalStorage())
    return true;

  if (D == S.EvaluatingDecl)
    return true;

  if (D->isConstexpr())
    return true;

  // If we're evaluating the initializer for a constexpr variable in C23, we
  // may only read other constexpr variables. Abort here since this one isn't
  // constexpr.
  if (const auto *VD = dyn_cast_if_present<VarDecl>(S.EvaluatingDecl);
      VD && VD->isConstexpr() && S.getLangOpts().C23)
    return Invalid(S, OpPC);

  QualType T = D->getType();
  bool IsConstant = T.isConstant(S.getASTContext());
  if (T->isIntegralOrEnumerationType()) {
    if (!IsConstant) {
      diagnoseNonConstVariable(S, OpPC, D);
      return false;
    }
    return true;
  }

  if (IsConstant) {
    if (S.getLangOpts().CPlusPlus) {
      S.CCEDiag(S.Current->getLocation(OpPC),
                S.getLangOpts().CPlusPlus11
                    ? diag::note_constexpr_ltor_non_constexpr
                    : diag::note_constexpr_ltor_non_integral,
                1)
          << D << T;
      S.Note(D->getLocation(), diag::note_declared_at);
    } else {
      S.CCEDiag(S.Current->getLocation(OpPC));
    }
    return true;
  }

  if (T->isPointerOrReferenceType()) {
    if (!T->getPointeeType().isConstant(S.getASTContext()) ||
        !S.getLangOpts().CPlusPlus11) {
      diagnoseNonConstVariable(S, OpPC, D);
      return false;
    }
    return true;
  }

  diagnoseNonConstVariable(S, OpPC, D);
  return false;
}

static bool CheckConstant(InterpState &S, CodePtr OpPC, const Pointer &Ptr) {
  if (!Ptr.isStatic() || !Ptr.isBlockPointer())
    return true;
  if (!Ptr.getDeclID())
    return true;
  return CheckConstant(S, OpPC, Ptr.getDeclDesc());
}

bool CheckNull(InterpState &S, CodePtr OpPC, const Pointer &Ptr,
               CheckSubobjectKind CSK) {
  if (!Ptr.isZero())
    return true;
  const SourceInfo &Loc = S.Current->getSource(OpPC);
  S.FFDiag(Loc, diag::note_constexpr_null_subobject)
      << CSK << S.Current->getRange(OpPC);

  return false;
}

bool CheckRange(InterpState &S, CodePtr OpPC, const Pointer &Ptr,
                AccessKinds AK) {
  if (!Ptr.isOnePastEnd())
    return true;
  if (S.getLangOpts().CPlusPlus) {
    const SourceInfo &Loc = S.Current->getSource(OpPC);
    S.FFDiag(Loc, diag::note_constexpr_access_past_end)
        << AK << S.Current->getRange(OpPC);
  }
  return false;
}

bool CheckRange(InterpState &S, CodePtr OpPC, const Pointer &Ptr,
                CheckSubobjectKind CSK) {
  if (!Ptr.isElementPastEnd())
    return true;
  const SourceInfo &Loc = S.Current->getSource(OpPC);
  S.FFDiag(Loc, diag::note_constexpr_past_end_subobject)
      << CSK << S.Current->getRange(OpPC);
  return false;
}

bool CheckSubobject(InterpState &S, CodePtr OpPC, const Pointer &Ptr,
                    CheckSubobjectKind CSK) {
  if (!Ptr.isOnePastEnd())
    return true;

  const SourceInfo &Loc = S.Current->getSource(OpPC);
  S.FFDiag(Loc, diag::note_constexpr_past_end_subobject)
      << CSK << S.Current->getRange(OpPC);
  return false;
}

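/// Checks that a base-to-derived cast by Offset stays within the bounds of
/// the most-derived object, diagnosing invalid downcasts.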
bool CheckDowncast(InterpState &S, CodePtr OpPC, const Pointer &Ptr,
                   uint32_t Offset) {
  uint32_t MinOffset = Ptr.getDeclDesc()->getMetadataSize();
  uint32_t PtrOffset = Ptr.getByteOffset();

  // We subtract Offset from PtrOffset. The result must be at least
  // MinOffset.
  if (Offset < PtrOffset && (PtrOffset - Offset) >= MinOffset)
    return true;

  const auto *E = cast<CastExpr>(S.Current->getExpr(OpPC));
  QualType TargetQT = E->getType()->getPointeeType();
  QualType MostDerivedQT = Ptr.getDeclPtr().getType();

  S.CCEDiag(E, diag::note_constexpr_invalid_downcast)
      << MostDerivedQT << TargetQT;

  return false;
}

bool CheckConst(InterpState &S, CodePtr OpPC, const Pointer &Ptr) {
  assert(Ptr.isLive() && "Pointer is not live");
  if (!Ptr.isConst() || Ptr.isMutable())
    return true;

  // The This pointer is writable in constructors and destructors,
  // even if isConst() returns true.
  // TODO(perf): We could be hitting this code path quite a lot in complex
  // constructors. Is there a better way to do this?
  if (S.Current->getFunction()) {
    for (const InterpFrame *Frame = S.Current; Frame; Frame = Frame->Caller) {
      if (const Function *Func = Frame->getFunction();
          Func && (Func->isConstructor() || Func->isDestructor()) &&
          Ptr.block() == Frame->getThis().block()) {
        return true;
      }
    }
  }

  if (!Ptr.isBlockPointer())
    return false;

  const QualType Ty = Ptr.getType();
  const SourceInfo &Loc = S.Current->getSource(OpPC);
  S.FFDiag(Loc, diag::note_constexpr_modify_const_type) << Ty;
  return false;
}

bool CheckMutable(InterpState &S, CodePtr OpPC, const Pointer &Ptr) {
  assert(Ptr.isLive() && "Pointer is not live");
  if (!Ptr.isMutable())
    return true;

  // In C++14 onwards, it is permitted to read a mutable member whose
  // lifetime began within the evaluation.
  if (S.getLangOpts().CPlusPlus14 &&
      Ptr.block()->getEvalID() == S.Ctx.getEvalID()) {
    // FIXME: This check is necessary because (of the way) we revisit
    // variables in Compiler.cpp:visitDeclRef. Revisiting a so far
    // unknown variable will get the same EvalID and we end up allowing
    // reads from mutable members of it.
    if (!S.inConstantContext() && isConstexprUnknown(Ptr))
      return false;
    return true;
  }

  const SourceInfo &Loc = S.Current->getSource(OpPC);
  const FieldDecl *Field = Ptr.getField();
  S.FFDiag(Loc, diag::note_constexpr_access_mutable, 1) << AK_Read << Field;
  S.Note(Field->getLocation(), diag::note_declared_at);
  return false;
}

static bool CheckVolatile(InterpState &S, CodePtr OpPC, const Pointer &Ptr,
                          AccessKinds AK) {
  assert(Ptr.isLive());

  if (!Ptr.isVolatile())
    return true;

  if (!S.getLangOpts().CPlusPlus)
    return Invalid(S, OpPC);

  // The reason why Ptr is volatile might be further up the hierarchy.
  // Find that pointer.
  Pointer P = Ptr;
  while (!P.isRoot()) {
    if (P.getType().isVolatileQualified())
      break;
    P = P.getBase();
  }

  const NamedDecl *ND = nullptr;
  int DiagKind;
  SourceLocation Loc;
  if (const auto *F = P.getField()) {
    DiagKind = 2;
    Loc = F->getLocation();
    ND = F;
  } else if (auto *VD = P.getFieldDesc()->asValueDecl()) {
    DiagKind = 1;
    Loc = VD->getLocation();
    ND = VD;
  } else {
    DiagKind = 0;
    if (const auto *E = P.getFieldDesc()->asExpr())
      Loc = E->getExprLoc();
  }

  S.FFDiag(S.Current->getLocation(OpPC),
           diag::note_constexpr_access_volatile_obj, 1)
      << AK << DiagKind << ND;
  S.Note(Loc, diag::note_constexpr_volatile_here) << DiagKind;
  return false;
}

bool CheckInitialized(InterpState &S, CodePtr OpPC, const Pointer &Ptr,
                      AccessKinds AK) {
  assert(Ptr.isLive());

  if (Ptr.isInitialized())
    return true;

  if (const auto *VD = Ptr.getDeclDesc()->asVarDecl();
      VD && (VD->isConstexpr() || VD->hasGlobalStorage())) {

    if (VD == S.EvaluatingDecl &&
        !(S.getLangOpts().CPlusPlus23 && VD->getType()->isReferenceType())) {
      if (!S.getLangOpts().CPlusPlus14 &&
          !VD->getType().isConstant(S.getASTContext())) {
        // Diagnose as non-const read.
        diagnoseNonConstVariable(S, OpPC, VD);
      } else {
        const SourceInfo &Loc = S.Current->getSource(OpPC);
        // Diagnose as "read of object outside its lifetime".
        S.FFDiag(Loc, diag::note_constexpr_access_uninit)
            << AK << /*IsIndeterminate=*/false;
      }
      return false;
    }

    if (VD->getAnyInitializer()) {
      const SourceInfo &Loc = S.Current->getSource(OpPC);
      S.FFDiag(Loc, diag::note_constexpr_var_init_non_constant, 1) << VD;
      S.Note(VD->getLocation(), diag::note_declared_at);
    } else {
      diagnoseMissingInitializer(S, OpPC, VD);
    }
    return false;
  }

  if (!S.checkingPotentialConstantExpression()) {
    S.FFDiag(S.Current->getSource(OpPC), diag::note_constexpr_access_uninit)
        << AK << /*uninitialized=*/true << S.Current->getRange(OpPC);
  }
  return false;
}

static bool CheckLifetime(InterpState &S, CodePtr OpPC, const Pointer &Ptr,
                          AccessKinds AK) {
  if (Ptr.getLifetime() == Lifetime::Started)
    return true;

  if (!S.checkingPotentialConstantExpression()) {
    S.FFDiag(S.Current->getSource(OpPC), diag::note_constexpr_access_uninit)
        << AK << /*uninitialized=*/false << S.Current->getRange(OpPC);
  }
  return false;
}

bool CheckGlobalInitialized(InterpState &S, CodePtr OpPC, const Pointer &Ptr) {
  if (Ptr.isInitialized())
    return true;

  assert(S.getLangOpts().CPlusPlus);
  const auto *VD = cast<VarDecl>(Ptr.getDeclDesc()->asValueDecl());
  if ((!VD->hasConstantInitialization() &&
       VD->mightBeUsableInConstantExpressions(S.getASTContext())) ||
      (S.getLangOpts().OpenCL && !S.getLangOpts().CPlusPlus11 &&
       !VD->hasICEInitializer(S.getASTContext()))) {
    const SourceInfo &Loc = S.Current->getSource(OpPC);
    S.FFDiag(Loc, diag::note_constexpr_var_init_non_constant, 1) << VD;
    S.Note(VD->getLocation(), diag::note_declared_at);
  }
  return false;
}

static bool CheckWeak(InterpState &S, CodePtr OpPC, const Pointer &Ptr) {
  if (!Ptr.isWeak())
    return true;

  const auto *VD = Ptr.getDeclDesc()->asVarDecl();
  assert(VD);
  S.FFDiag(S.Current->getLocation(OpPC), diag::note_constexpr_var_init_weak)
      << VD;
  S.Note(VD->getLocation(), diag::note_declared_at);

  return false;
}

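/// Runs the full series of checks required for a load (lvalue-to-rvalue
/// conversion) through Ptr: liveness, constness, union activity, lifetime,
/// initialization, and so on.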
bool CheckLoad(InterpState &S, CodePtr OpPC, const Pointer &Ptr,
               AccessKinds AK) {
  if (!CheckLive(S, OpPC, Ptr, AK))
    return false;
  if (!CheckExtern(S, OpPC, Ptr))
    return false;
  if (!CheckConstant(S, OpPC, Ptr))
    return false;
  if (!CheckDummy(S, OpPC, Ptr, AK))
    return false;
  if (!CheckRange(S, OpPC, Ptr, AK))
    return false;
  if (!CheckActive(S, OpPC, Ptr, AK))
    return false;
  if (!CheckLifetime(S, OpPC, Ptr, AK))
    return false;
  if (!CheckInitialized(S, OpPC, Ptr, AK))
    return false;
  if (!CheckTemporary(S, OpPC, Ptr, AK))
    return false;
  if (!CheckWeak(S, OpPC, Ptr))
    return false;
  if (!CheckMutable(S, OpPC, Ptr))
    return false;
  if (!CheckVolatile(S, OpPC, Ptr, AK))
    return false;
  return true;
}

/// This is not used by any of the opcodes directly. It's used by
/// EvalEmitter to do the final lvalue-to-rvalue conversion.
bool CheckFinalLoad(InterpState &S, CodePtr OpPC, const Pointer &Ptr) {
  if (!CheckLive(S, OpPC, Ptr, AK_Read))
    return false;
  if (!CheckConstant(S, OpPC, Ptr))
    return false;

  if (!CheckDummy(S, OpPC, Ptr, AK_Read))
    return false;
  if (!CheckExtern(S, OpPC, Ptr))
    return false;
  if (!CheckRange(S, OpPC, Ptr, AK_Read))
    return false;
  if (!CheckActive(S, OpPC, Ptr, AK_Read))
    return false;
  if (!CheckLifetime(S, OpPC, Ptr, AK_Read))
    return false;
  if (!CheckInitialized(S, OpPC, Ptr, AK_Read))
    return false;
  if (!CheckTemporary(S, OpPC, Ptr, AK_Read))
    return false;
  if (!CheckWeak(S, OpPC, Ptr))
    return false;
  if (!CheckMutable(S, OpPC, Ptr))
    return false;
  return true;
}

bool CheckStore(InterpState &S, CodePtr OpPC, const Pointer &Ptr) {
  if (!CheckLive(S, OpPC, Ptr, AK_Assign))
    return false;
  if (!CheckDummy(S, OpPC, Ptr, AK_Assign))
    return false;
  if (!CheckLifetime(S, OpPC, Ptr, AK_Assign))
    return false;
  if (!CheckExtern(S, OpPC, Ptr))
    return false;
  if (!CheckRange(S, OpPC, Ptr, AK_Assign))
    return false;
  if (!CheckGlobal(S, OpPC, Ptr))
    return false;
  if (!CheckConst(S, OpPC, Ptr))
    return false;
  if (!S.inConstantContext() && isConstexprUnknown(Ptr))
    return false;
  return true;
}

bool CheckInvoke(InterpState &S, CodePtr OpPC, const Pointer &Ptr) {
  if (!CheckLive(S, OpPC, Ptr, AK_MemberCall))
    return false;
  if (!Ptr.isDummy()) {
    if (!CheckExtern(S, OpPC, Ptr))
      return false;
    if (!CheckRange(S, OpPC, Ptr, AK_MemberCall))
      return false;
  }
  return true;
}

bool CheckInit(InterpState &S, CodePtr OpPC, const Pointer &Ptr) {
  if (!CheckLive(S, OpPC, Ptr, AK_Assign))
    return false;
  if (!CheckRange(S, OpPC, Ptr, AK_Assign))
    return false;
  return true;
}

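/// Checks whether F may be called in the current evaluation context and, if
/// not, emits a diagnostic explaining why the function isn't usable in a
/// constant expression.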
bool CheckCallable(InterpState &S, CodePtr OpPC, const Function *F) {

  if (F->isVirtual() && !S.getLangOpts().CPlusPlus20) {
    const SourceLocation &Loc = S.Current->getLocation(OpPC);
    S.CCEDiag(Loc, diag::note_constexpr_virtual_call);
    return false;
  }

  if (S.checkingPotentialConstantExpression() && S.Current->getDepth() != 0)
    return false;

  if (F->isValid() && F->hasBody() && F->isConstexpr())
    return true;

  // Implicitly constexpr.
  if (F->isLambdaStaticInvoker())
    return true;

  // Bail out if the function declaration itself is invalid. We will
  // have produced a relevant diagnostic while parsing it, so just
  // note the problematic sub-expression.
  if (F->getDecl()->isInvalidDecl())
    return Invalid(S, OpPC);

  // Diagnose failed assertions specially.
  if (S.Current->getLocation(OpPC).isMacroID() &&
      F->getDecl()->getIdentifier()) {
    // FIXME: Instead of checking for an implementation-defined function,
    // check and evaluate the assert() macro.
    StringRef Name = F->getDecl()->getName();
    bool AssertFailed =
        Name == "__assert_rtn" || Name == "__assert_fail" || Name == "_wassert";
    if (AssertFailed) {
      S.FFDiag(S.Current->getLocation(OpPC),
               diag::note_constexpr_assert_failed);
      return false;
    }
  }

  if (S.getLangOpts().CPlusPlus11) {
    const FunctionDecl *DiagDecl = F->getDecl();

    // Invalid decls have been diagnosed before.
    if (DiagDecl->isInvalidDecl())
      return false;

    // If this function is not constexpr because it is an inherited
    // non-constexpr constructor, diagnose that directly.
    const auto *CD = dyn_cast<CXXConstructorDecl>(DiagDecl);
    if (CD && CD->isInheritingConstructor()) {
      const auto *Inherited = CD->getInheritedConstructor().getConstructor();
      if (!Inherited->isConstexpr())
        DiagDecl = CD = Inherited;
    }

    // Silently reject constructors of invalid classes. The invalid class
    // has been rejected elsewhere before.
    if (CD && CD->getParent()->isInvalidDecl())
      return false;

    // FIXME: If DiagDecl is an implicitly-declared special member function
    // or an inheriting constructor, we should be much more explicit about why
    // it's not constexpr.
    if (CD && CD->isInheritingConstructor()) {
      S.FFDiag(S.Current->getLocation(OpPC),
               diag::note_constexpr_invalid_inhctor, 1)
          << CD->getInheritedConstructor().getConstructor()->getParent();
      S.Note(DiagDecl->getLocation(), diag::note_declared_at);
    } else {
      // Don't emit anything if the function isn't defined and we're checking
      // for a constant expression. It might be defined at the point we're
      // actually calling it.
      bool IsExtern = DiagDecl->getStorageClass() == SC_Extern;
      bool IsDefined = F->isDefined();
      if (!IsDefined && !IsExtern && DiagDecl->isConstexpr() &&
          S.checkingPotentialConstantExpression())
        return false;

      // If the declaration is defined, declared 'constexpr' _and_ has a body,
      // the below diagnostic doesn't add anything useful.
      if (DiagDecl->isDefined() && DiagDecl->isConstexpr() &&
          DiagDecl->hasBody())
        return false;

      S.FFDiag(S.Current->getLocation(OpPC),
               diag::note_constexpr_invalid_function, 1)
          << DiagDecl->isConstexpr() << (bool)CD << DiagDecl;

      if (DiagDecl->getDefinition())
        S.Note(DiagDecl->getDefinition()->getLocation(),
               diag::note_declared_at);
      else
        S.Note(DiagDecl->getLocation(), diag::note_declared_at);
    }
  } else {
    S.FFDiag(S.Current->getLocation(OpPC),
             diag::note_invalid_subexpr_in_const_expr);
  }

  return false;
}

bool CheckCallDepth(InterpState &S, CodePtr OpPC) {
  if ((S.Current->getDepth() + 1) > S.getLangOpts().ConstexprCallDepth) {
    S.FFDiag(S.Current->getSource(OpPC),
             diag::note_constexpr_depth_limit_exceeded)
        << S.getLangOpts().ConstexprCallDepth;
    return false;
  }

  return true;
}

bool CheckThis(InterpState &S, CodePtr OpPC, const Pointer &This) {
  if (!This.isZero())
    return true;

  const Expr *E = S.Current->getExpr(OpPC);
  if (S.getLangOpts().CPlusPlus11) {
    bool IsImplicit = false;
    if (const auto *TE = dyn_cast<CXXThisExpr>(E))
      IsImplicit = TE->isImplicit();
    S.FFDiag(E, diag::note_constexpr_this) << IsImplicit;
  } else {
    S.FFDiag(E);
  }

  return false;
}

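/// Checks the status of a floating-point operation against the in-effect
/// FPOptions: NaN results are diagnosed as undefined behavior, and outside of
/// a constant context, results that depend on the dynamic rounding mode or FP
/// exception state are rejected.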
bool CheckFloatResult(InterpState &S, CodePtr OpPC, const Floating &Result,
                      APFloat::opStatus Status, FPOptions FPO) {
  // [expr.pre]p4:
  //   If during the evaluation of an expression, the result is not
  //   mathematically defined [...], the behavior is undefined.
  // FIXME: C++ rules require us to not conform to IEEE 754 here.
  if (Result.isNan()) {
    const SourceInfo &E = S.Current->getSource(OpPC);
    S.CCEDiag(E, diag::note_constexpr_float_arithmetic)
        << /*NaN=*/true << S.Current->getRange(OpPC);
    return S.noteUndefinedBehavior();
  }

  // In a constant context, assume that any dynamic rounding mode or FP
  // exception state matches the default floating-point environment.
  if (S.inConstantContext())
    return true;

  if ((Status & APFloat::opInexact) &&
      FPO.getRoundingMode() == llvm::RoundingMode::Dynamic) {
    // Inexact result means that it depends on rounding mode. If the requested
    // mode is dynamic, the evaluation cannot be made in compile time.
    const SourceInfo &E = S.Current->getSource(OpPC);
    S.FFDiag(E, diag::note_constexpr_dynamic_rounding);
    return false;
  }

  if ((Status != APFloat::opOK) &&
      (FPO.getRoundingMode() == llvm::RoundingMode::Dynamic ||
       FPO.getExceptionMode() != LangOptions::FPE_Ignore ||
       FPO.getAllowFEnvAccess())) {
    const SourceInfo &E = S.Current->getSource(OpPC);
    S.FFDiag(E, diag::note_constexpr_float_arithmetic_strict);
    return false;
  }

  if ((Status & APFloat::opStatus::opInvalidOp) &&
      FPO.getExceptionMode() != LangOptions::FPE_Ignore) {
    const SourceInfo &E = S.Current->getSource(OpPC);
    // There is no usefully definable result.
    S.FFDiag(E);
    return false;
  }

  return true;
}

bool CheckDynamicMemoryAllocation(InterpState &S, CodePtr OpPC) {
  if (S.getLangOpts().CPlusPlus20)
    return true;

  const SourceInfo &E = S.Current->getSource(OpPC);
  S.CCEDiag(E, diag::note_constexpr_new);
  return true;
}

bool CheckNewDeleteForms(InterpState &S, CodePtr OpPC,
                         DynamicAllocator::Form AllocForm,
                         DynamicAllocator::Form DeleteForm, const Descriptor *D,
                         const Expr *NewExpr) {
  if (AllocForm == DeleteForm)
    return true;

  QualType TypeToDiagnose = D->getDataType(S.getASTContext());

  const SourceInfo &E = S.Current->getSource(OpPC);
  S.FFDiag(E, diag::note_constexpr_new_delete_mismatch)
      << static_cast<int>(DeleteForm) << static_cast<int>(AllocForm)
      << TypeToDiagnose;
  S.Note(NewExpr->getExprLoc(), diag::note_constexpr_dynamic_alloc_here)
      << NewExpr->getSourceRange();
  return false;
}

bool CheckDeleteSource(InterpState &S, CodePtr OpPC, const Expr *Source,
                       const Pointer &Ptr) {
  // Regular new type(...) call.
  if (isa_and_nonnull<CXXNewExpr>(Source))
    return true;
  // operator new.
  if (const auto *CE = dyn_cast_if_present<CallExpr>(Source);
      CE && CE->getBuiltinCallee() == Builtin::BI__builtin_operator_new)
    return true;
  // std::allocator.allocate() call.
  if (const auto *MCE = dyn_cast_if_present<CXXMemberCallExpr>(Source);
      MCE && MCE->getMethodDecl()->getIdentifier()->isStr("allocate"))
    return true;

  // Whatever this is, we didn't heap allocate it.
  const SourceInfo &Loc = S.Current->getSource(OpPC);
  S.FFDiag(Loc, diag::note_constexpr_delete_not_heap_alloc)
      << Ptr.toDiagnosticString(S.getASTContext());

  if (Ptr.isTemporary())
    S.Note(Ptr.getDeclLoc(), diag::note_constexpr_temporary_here);
  else
    S.Note(Ptr.getDeclLoc(), diag::note_declared_at);
  return false;
}

/// We already know the given DeclRefExpr is invalid for some reason,
/// now figure out why and print appropriate diagnostics.
bool CheckDeclRef(InterpState &S, CodePtr OpPC, const DeclRefExpr *DR) {
  const ValueDecl *D = DR->getDecl();
  return diagnoseUnknownDecl(S, OpPC, D);
}

bool CheckDummy(InterpState &S, CodePtr OpPC, const Pointer &Ptr,
                AccessKinds AK) {
  if (!Ptr.isDummy())
    return true;

  const Descriptor *Desc = Ptr.getDeclDesc();
  const ValueDecl *D = Desc->asValueDecl();
  if (!D)
    return false;

  if (AK == AK_Read || AK == AK_Increment || AK == AK_Decrement)
    return diagnoseUnknownDecl(S, OpPC, D);

  if (AK == AK_Destroy || S.getLangOpts().CPlusPlus14) {
    const SourceInfo &E = S.Current->getSource(OpPC);
    S.FFDiag(E, diag::note_constexpr_modify_global);
  }
  return false;
}

bool CheckNonNullArgs(InterpState &S, CodePtr OpPC, const Function *F,
                      const CallExpr *CE, unsigned ArgSize) {
  auto Args = ArrayRef(CE->getArgs(), CE->getNumArgs());
  auto NonNullArgs = collectNonNullArgs(F->getDecl(), Args);
  unsigned Offset = 0;
  unsigned Index = 0;
  for (const Expr *Arg : Args) {
    if (NonNullArgs[Index] && Arg->getType()->isPointerType()) {
      const Pointer &ArgPtr = S.Stk.peek<Pointer>(ArgSize - Offset);
      if (ArgPtr.isZero()) {
        const SourceLocation &Loc = S.Current->getLocation(OpPC);
        S.CCEDiag(Loc, diag::note_non_null_attribute_failed);
        return false;
      }
    }

    Offset += align(primSize(S.Ctx.classify(Arg).value_or(PT_Ptr)));
    ++Index;
  }
  return true;
}

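/// Runs the destructor of the record at BasePtr, if it has a non-trivial one,
/// diagnosing double-destruction of the object whose destructor is currently
/// executing.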
static bool runRecordDestructor(InterpState &S, CodePtr OpPC,
                                const Pointer &BasePtr,
                                const Descriptor *Desc) {
  assert(Desc->isRecord());
  const Record *R = Desc->ElemRecord;
  assert(R);

  if (Pointer::pointToSameBlock(BasePtr, S.Current->getThis()) &&
      S.Current->getFunction()->isDestructor()) {
    const SourceInfo &Loc = S.Current->getSource(OpPC);
    S.FFDiag(Loc, diag::note_constexpr_double_destroy);
    return false;
  }

  // Destructor of this record.
  if (const CXXDestructorDecl *Dtor = R->getDestructor();
      Dtor && !Dtor->isTrivial()) {
    const Function *DtorFunc = S.getContext().getOrCreateFunction(Dtor);
    if (!DtorFunc)
      return false;

    S.Stk.push<Pointer>(BasePtr);
    if (!Call(S, OpPC, DtorFunc, 0))
      return false;
  }
  return true;
}

static bool RunDestructors(InterpState &S, CodePtr OpPC, const Block *B) {
  assert(B);
  const Descriptor *Desc = B->getDescriptor();

  if (Desc->isPrimitive() || Desc->isPrimitiveArray())
    return true;

  assert(Desc->isRecord() || Desc->isCompositeArray());

  if (Desc->isCompositeArray()) {
    unsigned N = Desc->getNumElems();
    if (N == 0)
      return true;
    const Descriptor *ElemDesc = Desc->ElemDesc;
    assert(ElemDesc->isRecord());

    Pointer RP(const_cast<Block *>(B));
    for (int I = static_cast<int>(N) - 1; I >= 0; --I) {
      if (!runRecordDestructor(S, OpPC, RP.atIndex(I).narrow(), ElemDesc))
        return false;
    }
    return true;
  }

  assert(Desc->isRecord());
  return runRecordDestructor(S, OpPC, Pointer(const_cast<Block *>(B)), Desc);
}

static bool hasVirtualDestructor(QualType T) {
  if (const CXXRecordDecl *RD = T->getAsCXXRecordDecl())
    if (const CXXDestructorDecl *DD = RD->getDestructor())
      return DD->isVirtual();
  return false;
}

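/// Implements 'delete' and 'delete[]': pops the pointer off the stack,
/// validates it, runs the destructors, deallocates the block, and checks that
/// the allocation and deallocation forms match.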
bool Free(InterpState &S, CodePtr OpPC, bool DeleteIsArrayForm,
          bool IsGlobalDelete) {
  if (!CheckDynamicMemoryAllocation(S, OpPC))
    return false;

  const Expr *Source = nullptr;
  const Block *BlockToDelete = nullptr;
  {
    // Extra scope for this so the block doesn't have this pointer
    // pointing to it when we destroy it.
    Pointer Ptr = S.Stk.pop<Pointer>();

    // Deleting nullptr is always fine.
    if (Ptr.isZero())
      return true;

    // Remove base casts.
    QualType InitialType = Ptr.getType();
    while (Ptr.isBaseClass())
      Ptr = Ptr.getBase();

    // For the non-array case, the types must match if the static type
    // does not have a virtual destructor.
    if (!DeleteIsArrayForm && Ptr.getType() != InitialType &&
        !hasVirtualDestructor(InitialType)) {
      S.FFDiag(S.Current->getSource(OpPC),
               diag::note_constexpr_delete_base_nonvirt_dtor)
          << InitialType << Ptr.getType();
      return false;
    }

    if (!Ptr.isRoot() || Ptr.isOnePastEnd() ||
        (Ptr.isArrayElement() && Ptr.getIndex() != 0)) {
      const SourceInfo &Loc = S.Current->getSource(OpPC);
      S.FFDiag(Loc, diag::note_constexpr_delete_subobject)
          << Ptr.toDiagnosticString(S.getASTContext()) << Ptr.isOnePastEnd();
      return false;
    }

    Source = Ptr.getDeclDesc()->asExpr();
    BlockToDelete = Ptr.block();

    if (!CheckDeleteSource(S, OpPC, Source, Ptr))
      return false;

    // For a class type with a virtual destructor, the selected operator delete
    // is the one looked up when building the destructor.
    if (!DeleteIsArrayForm && !IsGlobalDelete) {
      QualType AllocType = Ptr.getType();
      auto getVirtualOperatorDelete = [](QualType T) -> const FunctionDecl * {
        if (const CXXRecordDecl *RD = T->getAsCXXRecordDecl())
          if (const CXXDestructorDecl *DD = RD->getDestructor())
            return DD->isVirtual() ? DD->getOperatorDelete() : nullptr;
        return nullptr;
      };

      if (const FunctionDecl *VirtualDelete =
              getVirtualOperatorDelete(AllocType);
          VirtualDelete &&
          !VirtualDelete
               ->isUsableAsGlobalAllocationFunctionInConstantEvaluation()) {
        S.FFDiag(S.Current->getSource(OpPC),
                 diag::note_constexpr_new_non_replaceable)
            << isa<CXXMethodDecl>(VirtualDelete) << VirtualDelete;
        return false;
      }
    }
  }
  assert(Source);
  assert(BlockToDelete);

  // Invoke destructors before deallocating the memory.
  if (!RunDestructors(S, OpPC, BlockToDelete))
    return false;

  DynamicAllocator &Allocator = S.getAllocator();
  const Descriptor *BlockDesc = BlockToDelete->getDescriptor();
  std::optional<DynamicAllocator::Form> AllocForm =
      Allocator.getAllocationForm(Source);

  if (!Allocator.deallocate(Source, BlockToDelete, S)) {
    // Nothing has been deallocated, this must be a double-delete.
    const SourceInfo &Loc = S.Current->getSource(OpPC);
    S.FFDiag(Loc, diag::note_constexpr_double_delete);
    return false;
  }

  assert(AllocForm);
  DynamicAllocator::Form DeleteForm = DeleteIsArrayForm
                                          ? DynamicAllocator::Form::Array
                                          : DynamicAllocator::Form::NonArray;
  return CheckNewDeleteForms(S, OpPC, *AllocForm, DeleteForm, BlockDesc,
                             Source);
}

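/// Diagnoses assignment of a value outside the representable range of an
/// unscoped enumeration, e.g. storing 4 into 'enum E { A, B, C }', whose
/// value range is [0, 3].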
void diagnoseEnumValue(InterpState &S, CodePtr OpPC, const EnumDecl *ED,
                       const APSInt &Value) {
  if (S.EvaluatingDecl && !S.EvaluatingDecl->isConstexpr())
    return;

  llvm::APInt Min;
  llvm::APInt Max;
  ED->getValueRange(Max, Min);
  --Max;

  if (ED->getNumNegativeBits() &&
      (Max.slt(Value.getSExtValue()) || Min.sgt(Value.getSExtValue()))) {
    const SourceLocation &Loc = S.Current->getLocation(OpPC);
    S.CCEDiag(Loc, diag::note_constexpr_unscoped_enum_out_of_range)
        << llvm::toString(Value, 10) << Min.getSExtValue() << Max.getSExtValue()
        << ED;
  } else if (!ED->getNumNegativeBits() && Max.ult(Value.getZExtValue())) {
    const SourceLocation &Loc = S.Current->getLocation(OpPC);
    S.CCEDiag(Loc, diag::note_constexpr_unscoped_enum_out_of_range)
        << llvm::toString(Value, 10) << Min.getZExtValue() << Max.getZExtValue()
        << ED;
  }
}

bool CheckLiteralType(InterpState &S, CodePtr OpPC, const Type *T) {
  assert(T);
  assert(!S.getLangOpts().CPlusPlus23);

  // C++1y: A constant initializer for an object o [...] may also invoke
  // constexpr constructors for o and its subobjects even if those objects
  // are of non-literal class types.
  //
  // C++11 missed this detail for aggregates, so classes like this:
  //   struct foo_t { union { int i; volatile int j; } u; };
  // are not (obviously) initializable like so:
  //   __attribute__((__require_constant_initialization__))
  //   static const foo_t x = {{0}};
  // because "i" is a subobject with non-literal initialization (due to the
  // volatile member of the union). See:
  //   http://www.open-std.org/jtc1/sc22/wg21/docs/cwg_active.html#1677
  // Therefore, we use the C++1y behavior.

  if (S.Current->getFunction() && S.Current->getFunction()->isConstructor() &&
      S.Current->getThis().getDeclDesc()->asDecl() == S.EvaluatingDecl) {
    return true;
  }

  const Expr *E = S.Current->getExpr(OpPC);
  if (S.getLangOpts().CPlusPlus11)
    S.FFDiag(E, diag::note_constexpr_nonliteral) << E->getType();
  else
    S.FFDiag(E, diag::note_invalid_subexpr_in_const_expr);
  return false;
}

static bool getField(InterpState &S, CodePtr OpPC, const Pointer &Ptr,
                     uint32_t Off) {
  if (S.getLangOpts().CPlusPlus && S.inConstantContext() &&
      !CheckNull(S, OpPC, Ptr, CSK_Field))
    return false;

  if (!CheckRange(S, OpPC, Ptr, CSK_Field))
    return false;
  if (!CheckArray(S, OpPC, Ptr))
    return false;
  if (!CheckSubobject(S, OpPC, Ptr, CSK_Field))
    return false;

  if (Ptr.isIntegralPointer()) {
    S.Stk.push<Pointer>(Ptr.asIntPointer().atOffset(S.getASTContext(), Off));
    return true;
  }

  if (!Ptr.isBlockPointer()) {
    // FIXME: The only time we (seem to) get here is when trying to access a
    // field of a typeid pointer. In that case, we're supposed to diagnose e.g.
    // `typeid(int).name`, but we currently diagnose `&typeid(int)`.
    S.FFDiag(S.Current->getSource(OpPC),
             diag::note_constexpr_access_unreadable_object)
        << AK_Read << Ptr.toDiagnosticString(S.getASTContext());
    return false;
  }

  if ((Ptr.getByteOffset() + Off) >= Ptr.block()->getSize())
    return false;

  S.Stk.push<Pointer>(Ptr.atField(Off));
  return true;
}

bool GetPtrField(InterpState &S, CodePtr OpPC, uint32_t Off) {
  const auto &Ptr = S.Stk.peek<Pointer>();
  return getField(S, OpPC, Ptr, Off);
}

bool GetPtrFieldPop(InterpState &S, CodePtr OpPC, uint32_t Off) {
  const auto &Ptr = S.Stk.pop<Pointer>();
  return getField(S, OpPC, Ptr, Off);
}

static bool checkConstructor(InterpState &S, CodePtr OpPC, const Function *Func,
                             const Pointer &ThisPtr) {
  assert(Func->isConstructor());

  if (Func->getParentDecl()->isInvalidDecl())
    return false;

  const Descriptor *D = ThisPtr.getFieldDesc();
  // FIXME: I think this case is not 100% correct. E.g. a pointer into a
  // subobject of a composite array.
  if (!D->ElemRecord)
    return true;

  if (D->ElemRecord->getNumVirtualBases() == 0)
    return true;

  S.FFDiag(S.Current->getLocation(OpPC), diag::note_constexpr_virtual_base)
      << Func->getParentDecl();
  return false;
}

bool CheckDestructor(InterpState &S, CodePtr OpPC, const Pointer &Ptr) {
  if (!CheckLive(S, OpPC, Ptr, AK_Destroy))
    return false;
  if (!CheckTemporary(S, OpPC, Ptr, AK_Destroy))
    return false;
  if (!CheckRange(S, OpPC, Ptr, AK_Destroy))
    return false;

  // Can't call a dtor on a global variable.
  if (Ptr.block()->isStatic()) {
    const SourceInfo &E = S.Current->getSource(OpPC);
    S.FFDiag(E, diag::note_constexpr_modify_global);
    return false;
  }
  return CheckActive(S, OpPC, Ptr, AK_Destroy);
}

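/// Lazily compiles the body of Func; the Call* helpers invoke this for
/// functions that haven't been fully compiled yet.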
static void compileFunction(InterpState &S, const Function *Func) {
  Compiler<ByteCodeEmitter>(S.getContext(), S.P)
      .compileFunc(Func->getDecl()->getMostRecentDecl(),
                   const_cast<Function *>(Func));
}

bool CallVar(InterpState &S, CodePtr OpPC, const Function *Func,
             uint32_t VarArgSize) {
  if (Func->hasThisPointer()) {
    size_t ArgSize = Func->getArgSize() + VarArgSize;
    size_t ThisOffset = ArgSize - (Func->hasRVO() ? primSize(PT_Ptr) : 0);
    const Pointer &ThisPtr = S.Stk.peek<Pointer>(ThisOffset);

    // If the current function is a lambda static invoker and
    // the function we're about to call is a lambda call operator,
    // skip the CheckInvoke, since the ThisPtr is a null pointer
    // anyway.
    if (!(S.Current->getFunction() &&
          S.Current->getFunction()->isLambdaStaticInvoker() &&
          Func->isLambdaCallOperator())) {
      if (!CheckInvoke(S, OpPC, ThisPtr))
        return false;
    }

    if (S.checkingPotentialConstantExpression())
      return false;
  }

  if (!Func->isFullyCompiled())
    compileFunction(S, Func);

  if (!CheckCallable(S, OpPC, Func))
    return false;

  if (!CheckCallDepth(S, OpPC))
    return false;

  auto NewFrame = std::make_unique<InterpFrame>(S, Func, OpPC, VarArgSize);
  InterpFrame *FrameBefore = S.Current;
  S.Current = NewFrame.get();

  // Note that we cannot assert(CallResult.hasValue()) here since
  // Ret() above only sets the APValue if the current frame doesn't
  // have a caller set.
  if (Interpret(S)) {
    NewFrame.release(); // Frame was delete'd already.
    assert(S.Current == FrameBefore);
    return true;
  }

  // Interpreting the function failed somehow. Reset to
  // previous state.
  S.Current = FrameBefore;
  return false;
}

bool Call(InterpState &S, CodePtr OpPC, const Function *Func,
          uint32_t VarArgSize) {
  assert(Func);
  auto cleanup = [&]() -> bool {
    cleanupAfterFunctionCall(S, OpPC, Func);
    return false;
  };

  if (Func->hasThisPointer()) {
    size_t ArgSize = Func->getArgSize() + VarArgSize;
    size_t ThisOffset = ArgSize - (Func->hasRVO() ? primSize(PT_Ptr) : 0);

    const Pointer &ThisPtr = S.Stk.peek<Pointer>(ThisOffset);

    // C++23 [expr.const]p5.6
    //   an invocation of a virtual function ([class.virtual]) for an object
    //   whose dynamic type is constexpr-unknown;
    if (ThisPtr.isDummy() && Func->isVirtual())
      return false;

    // If the current function is a lambda static invoker and
    // the function we're about to call is a lambda call operator,
    // skip the CheckInvoke, since the ThisPtr is a null pointer
    // anyway.
    if (S.Current->getFunction() &&
        S.Current->getFunction()->isLambdaStaticInvoker() &&
        Func->isLambdaCallOperator()) {
      assert(ThisPtr.isZero());
    } else {
      if (!CheckInvoke(S, OpPC, ThisPtr))
        return cleanup();
      if (!Func->isConstructor() && !Func->isDestructor() &&
          !Func->isCopyOrMoveOperator() &&
          !CheckActive(S, OpPC, ThisPtr, AK_MemberCall))
        return false;
    }

    if (Func->isConstructor() && !checkConstructor(S, OpPC, Func, ThisPtr))
      return false;
    if (Func->isDestructor() && !CheckDestructor(S, OpPC, ThisPtr))
      return false;
  }

  if (!Func->isFullyCompiled())
    compileFunction(S, Func);

  if (!CheckCallable(S, OpPC, Func))
    return cleanup();

  // FIXME: The isConstructor() check here is not always right. The current
  // constant evaluator is somewhat inconsistent in when it allows a function
  // call when checking for a constant expression.
  if (Func->hasThisPointer() && S.checkingPotentialConstantExpression() &&
      !Func->isConstructor())
    return cleanup();

  if (!CheckCallDepth(S, OpPC))
    return cleanup();

  auto NewFrame = std::make_unique<InterpFrame>(S, Func, OpPC, VarArgSize);
  InterpFrame *FrameBefore = S.Current;
  S.Current = NewFrame.get();

  InterpStateCCOverride CCOverride(S, Func->isImmediate());
  // Note that we cannot assert(CallResult.hasValue()) here since
  // Ret() above only sets the APValue if the current frame doesn't
  // have a caller set.
  if (Interpret(S)) {
    NewFrame.release(); // Frame was delete'd already.
    assert(S.Current == FrameBefore);
    return true;
  }

  // Interpreting the function failed somehow. Reset to
  // previous state.
  S.Current = FrameBefore;
  return false;
}

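/// Performs a virtual call: determines the dynamic type of the object,
/// resolves the overrider of Func in that type, dispatches to Call(), and
/// adjusts the returned pointer for covariant return types if necessary.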
1561bool CallVirt(InterpState &S, CodePtr OpPC, const Function *Func,
1562 uint32_t VarArgSize) {
1563 assert(Func->hasThisPointer());
1564 assert(Func->isVirtual());
1565 size_t ArgSize = Func->getArgSize() + VarArgSize;
1566 size_t ThisOffset = ArgSize - (Func->hasRVO() ? primSize(Type: PT_Ptr) : 0);
1567 Pointer &ThisPtr = S.Stk.peek<Pointer>(Offset: ThisOffset);
1568 const FunctionDecl *Callee = Func->getDecl();
1569
1570 if (!Func->isFullyCompiled())
1571 compileFunction(S, Func);
1572
1573 // C++2a [class.abstract]p6:
1574 // the effect of making a virtual call to a pure virtual function [...] is
1575 // undefined
1576 if (Callee->isPureVirtual()) {
1577 S.FFDiag(SI: S.Current->getSource(PC: OpPC), DiagId: diag::note_constexpr_pure_virtual_call,
1578 ExtraNotes: 1)
1579 << Callee;
1580 S.Note(Loc: Callee->getLocation(), DiagId: diag::note_declared_at);
1581 return false;
1582 }
1583
1584 const CXXRecordDecl *DynamicDecl = nullptr;
1585 {
1586 Pointer TypePtr = ThisPtr;
1587 while (TypePtr.isBaseClass())
1588 TypePtr = TypePtr.getBase();
1589
1590 QualType DynamicType = TypePtr.getType();
1591 if (DynamicType->isPointerType() || DynamicType->isReferenceType())
1592 DynamicDecl = DynamicType->getPointeeCXXRecordDecl();
1593 else
1594 DynamicDecl = DynamicType->getAsCXXRecordDecl();
1595 }
1596 assert(DynamicDecl);
1597
1598 const auto *StaticDecl = cast<CXXRecordDecl>(Val: Func->getParentDecl());
1599 const auto *InitialFunction = cast<CXXMethodDecl>(Val: Callee);
1600 const CXXMethodDecl *Overrider = S.getContext().getOverridingFunction(
1601 DynamicDecl, StaticDecl, InitialFunction);
1602
1603 if (Overrider != InitialFunction) {
1604 // DR1872: An instantiated virtual constexpr function can't be called in a
1605 // constant expression (prior to C++20). We can still constant-fold such a
1606 // call.
1607 if (!S.getLangOpts().CPlusPlus20 && Overrider->isVirtual()) {
1608 const Expr *E = S.Current->getExpr(PC: OpPC);
1609 S.CCEDiag(E, DiagId: diag::note_constexpr_virtual_call) << E->getSourceRange();
1610 }
1611
1612 Func = S.getContext().getOrCreateFunction(FuncDecl: Overrider);
1613
1614 const CXXRecordDecl *ThisFieldDecl =
1615 ThisPtr.getFieldDesc()->getType()->getAsCXXRecordDecl();
1616 if (Func->getParentDecl()->isDerivedFrom(Base: ThisFieldDecl)) {
1617 // If the function we call is further DOWN the hierarchy than the
1618 // FieldDesc of our pointer, just go up the hierarchy of this field
1619 // the furthest we can go.
1620 while (ThisPtr.isBaseClass())
1621 ThisPtr = ThisPtr.getBase();
1622 }
1623 }
1624
1625 if (!Call(S, OpPC, Func, VarArgSize))
1626 return false;
1627
1628 // Covariant return types. The return type of Overrider is a pointer
1629 // or reference to a class type.
1630 if (Overrider != InitialFunction &&
1631 Overrider->getReturnType()->isPointerOrReferenceType() &&
1632 InitialFunction->getReturnType()->isPointerOrReferenceType()) {
1633 QualType OverriderPointeeType =
1634 Overrider->getReturnType()->getPointeeType();
1635 QualType InitialPointeeType =
1636 InitialFunction->getReturnType()->getPointeeType();
1637 // We've called Overrider above, but calling code expects us to return what
1638 // InitialFunction returned. According to the rules for covariant return
1639 // types, what InitialFunction returns needs to be a base class of what
1640 // Overrider returns. So, we need to do an upcast here.
1641 unsigned Offset = S.getContext().collectBaseOffset(
1642 BaseDecl: InitialPointeeType->getAsRecordDecl(),
1643 DerivedDecl: OverriderPointeeType->getAsRecordDecl());
1644 return GetPtrBasePop(S, OpPC, Off: Offset, /*IsNullOK=*/NullOK: true);
1645 }
1646
1647 return true;
1648}
1649
1650bool CallBI(InterpState &S, CodePtr OpPC, const CallExpr *CE,
1651 uint32_t BuiltinID) {
1652 // A little arbitrary, but the current interpreter allows evaluation
1653 // of builtin functions in this mode, with some exceptions.
1654 if (BuiltinID == Builtin::BI__builtin_operator_new &&
1655 S.checkingPotentialConstantExpression())
1656 return false;
1657
1658 return InterpretBuiltin(S, OpPC, Call: CE, BuiltinID);
1659}
1660
1661bool CallPtr(InterpState &S, CodePtr OpPC, uint32_t ArgSize,
1662 const CallExpr *CE) {
1663 const Pointer &Ptr = S.Stk.pop<Pointer>();
1664
1665 if (Ptr.isZero()) {
1666 const auto *E = cast<CallExpr>(Val: S.Current->getExpr(PC: OpPC));
1667 S.FFDiag(E, DiagId: diag::note_constexpr_null_callee)
1668 << const_cast<Expr *>(E->getCallee()) << E->getSourceRange();
1669 return false;
1670 }
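
// E.g. (hypothetical): constexpr int (*fp)() = nullptr; calling fp() in a
// constant expression is diagnosed as a call through a null callee above.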
1671
1672 if (!Ptr.isFunctionPointer())
1673 return Invalid(S, OpPC);
1674
1675 const FunctionPointer &FuncPtr = Ptr.asFunctionPointer();
1676 const Function *F = FuncPtr.getFunction();
1677 assert(F);
1678 // Don't allow calling block pointers.
1679 if (!F->getDecl())
1680 return Invalid(S, OpPC);
1681
1682 // This happens when the call expression has been cast to a function
1683 // type with a different return type; we don't support that.
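// E.g. (hypothetical):
//   int f();
//   auto v = reinterpret_cast<double (*)()>(f)();
// The classified return types (double vs. int) differ, so we reject this.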
1684 if (S.Ctx.classify(T: F->getDecl()->getReturnType()) !=
1685 S.Ctx.classify(T: CE->getCallReturnType(Ctx: S.getASTContext())))
1686 return false;
1687
1688 // Check argument nullability state.
1689 if (F->hasNonNullAttr()) {
1690 if (!CheckNonNullArgs(S, OpPC, F, CE, ArgSize))
1691 return false;
1692 }
1693
1694 assert(ArgSize >= F->getWrittenArgSize());
1695 uint32_t VarArgSize = ArgSize - F->getWrittenArgSize();
1696
1697 // We need to do this explicitly here since we don't have the necessary
1698 // information to do it automatically.
1699 if (F->isThisPointerExplicit())
1700 VarArgSize -= align(Size: primSize(Type: PT_Ptr));
1701
1702 if (F->isVirtual())
1703 return CallVirt(S, OpPC, Func: F, VarArgSize);
1704
1705 return Call(S, OpPC, Func: F, VarArgSize);
1706}
1707
1708static void startLifetimeRecurse(const Pointer &Ptr) {
1709 if (const Record *R = Ptr.getRecord()) {
1710 Ptr.startLifetime();
1711 for (const Record::Field &Fi : R->fields())
1712 startLifetimeRecurse(Ptr: Ptr.atField(Off: Fi.Offset));
1713 return;
1714 }
1715
1716 if (const Descriptor *FieldDesc = Ptr.getFieldDesc();
1717 FieldDesc->isCompositeArray()) {
1718 assert(Ptr.getLifetime() == Lifetime::Started);
1719 for (unsigned I = 0; I != FieldDesc->getNumElems(); ++I)
1720 startLifetimeRecurse(Ptr: Ptr.atIndex(Idx: I).narrow());
1721 return;
1722 }
1723
1724 Ptr.startLifetime();
1725}
1726
1727bool StartLifetime(InterpState &S, CodePtr OpPC) {
1728 const auto &Ptr = S.Stk.peek<Pointer>();
1729 if (!CheckDummy(S, OpPC, Ptr, AK: AK_Destroy))
1730 return false;
1731 startLifetimeRecurse(Ptr: Ptr.narrow());
1732 return true;
1733}
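
// E.g. (hypothetical): a placement-new or std::construct_at call must mark
// the constructed object and, recursively, all of its subobjects as being
// within their lifetime, which is what startLifetimeRecurse() does.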
1734
1735 // FIXME: It might be better to do the recursing as part of the generated
1736 // code for a destructor?
1737static void endLifetimeRecurse(const Pointer &Ptr) {
1738 if (const Record *R = Ptr.getRecord()) {
1739 Ptr.endLifetime();
1740 for (const Record::Field &Fi : R->fields())
1741 endLifetimeRecurse(Ptr: Ptr.atField(Off: Fi.Offset));
1742 return;
1743 }
1744
1745 if (const Descriptor *FieldDesc = Ptr.getFieldDesc();
1746 FieldDesc->isCompositeArray()) {
1747 // No endLifetime() for array roots.
1748 assert(Ptr.getLifetime() == Lifetime::Started);
1749 for (unsigned I = 0; I != FieldDesc->getNumElems(); ++I)
1750 endLifetimeRecurse(Ptr: Ptr.atIndex(Idx: I).narrow());
1751 return;
1752 }
1753
1754 Ptr.endLifetime();
1755}
1756
1757/// Ends the lifetime of the peek'd pointer.
1758bool EndLifetime(InterpState &S, CodePtr OpPC) {
1759 const auto &Ptr = S.Stk.peek<Pointer>();
1760 if (!CheckDummy(S, OpPC, Ptr, AK: AK_Destroy))
1761 return false;
1762 endLifetimeRecurse(Ptr: Ptr.narrow());
1763 return true;
1764}
1765
1766/// Ends the lifetime of the pop'd pointer.
1767bool EndLifetimePop(InterpState &S, CodePtr OpPC) {
1768 const auto &Ptr = S.Stk.pop<Pointer>();
1769 if (!CheckDummy(S, OpPC, Ptr, AK: AK_Destroy))
1770 return false;
1771 endLifetimeRecurse(Ptr: Ptr.narrow());
1772 return true;
1773}
1774
1775bool CheckNewTypeMismatch(InterpState &S, CodePtr OpPC, const Expr *E,
1776 std::optional<uint64_t> ArraySize) {
1777 const Pointer &Ptr = S.Stk.peek<Pointer>();
1778
1779 // Similar to CheckStore(), but with an additional CheckTemporary() call
1780 // and different AccessKinds.
1781 if (!CheckTemporary(S, OpPC, Ptr, AK: AK_Construct))
1782 return false;
1783 if (!CheckLive(S, OpPC, Ptr, AK: AK_Construct))
1784 return false;
1785 if (!CheckDummy(S, OpPC, Ptr, AK: AK_Construct))
1786 return false;
1787
1788 // CheckLifetime for this and all base pointers.
1789 for (Pointer P = Ptr;;) {
1790 if (!CheckLifetime(S, OpPC, Ptr: P, AK: AK_Construct))
1791 return false;
1792
1793 if (P.isRoot())
1794 break;
1795 P = P.getBase();
1796 }
1797 if (!CheckExtern(S, OpPC, Ptr))
1798 return false;
1799 if (!CheckRange(S, OpPC, Ptr, AK: AK_Construct))
1800 return false;
1801 if (!CheckGlobal(S, OpPC, Ptr))
1802 return false;
1803 if (!CheckConst(S, OpPC, Ptr))
1804 return false;
1805 if (!S.inConstantContext() && isConstexprUnknown(P: Ptr))
1806 return false;
1807
1808 if (!InvalidNewDeleteExpr(S, OpPC, E))
1809 return false;
1810
1811 const auto *NewExpr = cast<CXXNewExpr>(Val: E);
1812 QualType StorageType = Ptr.getFieldDesc()->getDataType(Ctx: S.getASTContext());
1813 const ASTContext &ASTCtx = S.getASTContext();
1814 QualType AllocType;
1815 if (ArraySize) {
1816 AllocType = ASTCtx.getConstantArrayType(
1817 EltTy: NewExpr->getAllocatedType(),
1818 ArySize: APInt(64, static_cast<uint64_t>(*ArraySize), false), SizeExpr: nullptr,
1819 ASM: ArraySizeModifier::Normal, IndexTypeQuals: 0);
1820 } else {
1821 AllocType = NewExpr->getAllocatedType();
1822 }
1823
1824 unsigned StorageSize = 1;
1825 unsigned AllocSize = 1;
1826 if (const auto *CAT = dyn_cast<ConstantArrayType>(Val&: AllocType))
1827 AllocSize = CAT->getZExtSize();
1828 if (const auto *CAT = dyn_cast<ConstantArrayType>(Val&: StorageType))
1829 StorageSize = CAT->getZExtSize();
1830
1831 if (AllocSize > StorageSize ||
1832 !ASTCtx.hasSimilarType(T1: ASTCtx.getBaseElementType(QT: AllocType),
1833 T2: ASTCtx.getBaseElementType(QT: StorageType))) {
1834 S.FFDiag(Loc: S.Current->getLocation(PC: OpPC),
1835 DiagId: diag::note_constexpr_placement_new_wrong_type)
1836 << StorageType << AllocType;
1837 return false;
1838 }
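
// Illustrative (hypothetical) mismatches rejected above:
//   int storage;
//   new (&storage) float(1.0f); // element types not similar
//   int arr[2];
//   new (&arr) int[4];          // allocation larger than the storage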
1839
1840 // Can't activate fields in a union, unless the direct base is the union.
1841 if (Ptr.inUnion() && !Ptr.isActive() && !Ptr.getBase().getRecord()->isUnion())
1842 return CheckActive(S, OpPC, Ptr, AK: AK_Construct);
1843
1844 return true;
1845}
1846
1847bool InvalidNewDeleteExpr(InterpState &S, CodePtr OpPC, const Expr *E) {
1848 assert(E);
1849
1850 if (const auto *NewExpr = dyn_cast<CXXNewExpr>(Val: E)) {
1851 const FunctionDecl *OperatorNew = NewExpr->getOperatorNew();
1852
1853 if (NewExpr->getNumPlacementArgs() > 0) {
1854 // This is allowed pre-C++26, but only within an std function.
1855 if (S.getLangOpts().CPlusPlus26 || S.Current->isStdFunction())
1856 return true;
1857 S.FFDiag(SI: S.Current->getSource(PC: OpPC), DiagId: diag::note_constexpr_new_placement)
1858 << /*C++26 feature*/ 1 << E->getSourceRange();
1859 } else if (
1860 !OperatorNew
1861 ->isUsableAsGlobalAllocationFunctionInConstantEvaluation()) {
1862 S.FFDiag(SI: S.Current->getSource(PC: OpPC),
1863 DiagId: diag::note_constexpr_new_non_replaceable)
1864 << isa<CXXMethodDecl>(Val: OperatorNew) << OperatorNew;
1865 return false;
1866 } else if (!S.getLangOpts().CPlusPlus26 &&
1867 NewExpr->getNumPlacementArgs() == 1 &&
1868 !OperatorNew->isReservedGlobalPlacementOperator()) {
1870 S.FFDiag(SI: S.Current->getSource(PC: OpPC), DiagId: diag::note_constexpr_new_placement)
1871 << /*Unsupported*/ 0 << E->getSourceRange();
1872 return false;
1875 }
1876 } else {
1877 const auto *DeleteExpr = cast<CXXDeleteExpr>(Val: E);
1878 const FunctionDecl *OperatorDelete = DeleteExpr->getOperatorDelete();
1879 if (!OperatorDelete
1880 ->isUsableAsGlobalAllocationFunctionInConstantEvaluation()) {
1881 S.FFDiag(SI: S.Current->getSource(PC: OpPC),
1882 DiagId: diag::note_constexpr_new_non_replaceable)
1883 << isa<CXXMethodDecl>(Val: OperatorDelete) << OperatorDelete;
1884 return false;
1885 }
1886 }
1887
1888 return false;
1889}
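
// E.g. (hypothetical): a class-specific allocation function is not usable in
// constant evaluation and is rejected above:
//   struct S { static void *operator new(std::size_t); };
//   new S in a constant expression is then diagnosed as using a
//   non-replaceable 'operator new'.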
1890
1891bool handleFixedPointOverflow(InterpState &S, CodePtr OpPC,
1892 const FixedPoint &FP) {
1893 const Expr *E = S.Current->getExpr(PC: OpPC);
1894 if (S.checkingForUndefinedBehavior()) {
1895 S.getASTContext().getDiagnostics().Report(
1896 Loc: E->getExprLoc(), DiagID: diag::warn_fixedpoint_constant_overflow)
1897 << FP.toDiagnosticString(Ctx: S.getASTContext()) << E->getType();
1898 }
1899 S.CCEDiag(E, DiagId: diag::note_constexpr_overflow)
1900 << FP.toDiagnosticString(Ctx: S.getASTContext()) << E->getType();
1901 return S.noteUndefinedBehavior();
1902}
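
// E.g. (hypothetical, with -ffixed-point, on typical targets):
//   _Accum a = 256.0k * 256.0k; // 65536 does not fit in _Accum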
1903
1904bool InvalidShuffleVectorIndex(InterpState &S, CodePtr OpPC, uint32_t Index) {
1905 const SourceInfo &Loc = S.Current->getSource(PC: OpPC);
1906 S.FFDiag(SI: Loc,
1907 DiagId: diag::err_shufflevector_minus_one_is_undefined_behavior_constexpr)
1908 << Index;
1909 return false;
1910}
1911
1912bool CheckPointerToIntegralCast(InterpState &S, CodePtr OpPC,
1913 const Pointer &Ptr, unsigned BitWidth) {
1914 if (Ptr.isDummy())
1915 return false;
1916 if (Ptr.isFunctionPointer())
1917 return true;
1918
1919 const SourceInfo &E = S.Current->getSource(PC: OpPC);
1920 S.CCEDiag(SI: E, DiagId: diag::note_constexpr_invalid_cast)
1921 << 2 << S.getLangOpts().CPlusPlus << S.Current->getRange(PC: OpPC);
1922
1923 if (Ptr.isBlockPointer() && !Ptr.isZero()) {
1924 // Only allow based lvalue casts if they are lossless.
1925 if (S.getASTContext().getTargetInfo().getPointerWidth(AddrSpace: LangAS::Default) !=
1926 BitWidth)
1927 return Invalid(S, OpPC);
1928 }
1929 return true;
1930}
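
// E.g. (hypothetical): given constexpr int i = 0; the cast (intptr_t)&i is
// diagnosed as an invalid cast but may still be folded, while casting &i to
// an integer type narrower than a pointer is rejected outright, since the
// value would be truncated.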
1931
1932bool CastPointerIntegralAP(InterpState &S, CodePtr OpPC, uint32_t BitWidth) {
1933 const Pointer &Ptr = S.Stk.pop<Pointer>();
1934
1935 if (!CheckPointerToIntegralCast(S, OpPC, Ptr, BitWidth))
1936 return false;
1937
1938 auto Result = S.allocAP<IntegralAP<false>>(BitWidth);
1939 Result.copy(V: APInt(BitWidth, Ptr.getIntegerRepresentation()));
1940
1941 S.Stk.push<IntegralAP<false>>(Args&: Result);
1942 return true;
1943}
1944
1945bool CastPointerIntegralAPS(InterpState &S, CodePtr OpPC, uint32_t BitWidth) {
1946 const Pointer &Ptr = S.Stk.pop<Pointer>();
1947
1948 if (!CheckPointerToIntegralCast(S, OpPC, Ptr, BitWidth))
1949 return false;
1950
1951 auto Result = S.allocAP<IntegralAP<true>>(BitWidth);
1952 Result.copy(V: APInt(BitWidth, Ptr.getIntegerRepresentation()));
1953
1954 S.Stk.push<IntegralAP<true>>(Args&: Result);
1955 return true;
1956}
1957
1958bool CheckBitCast(InterpState &S, CodePtr OpPC, bool HasIndeterminateBits,
1959 bool TargetIsUCharOrByte) {
1960 // This is always fine.
1961 if (!HasIndeterminateBits)
1962 return true;
1963
1964 // Indeterminate bits can only be bitcast to unsigned char or std::byte.
1965 if (TargetIsUCharOrByte)
1966 return true;
1967
1968 const Expr *E = S.Current->getExpr(PC: OpPC);
1969 QualType ExprType = E->getType();
1970 S.FFDiag(E, DiagId: diag::note_constexpr_bit_cast_indet_dest)
1971 << ExprType << S.getLangOpts().CharIsSigned << E->getSourceRange();
1972 return false;
1973}
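
// Illustrative (hypothetical) case, assuming a common 64-bit target:
//   struct P { char c; int i; }; // three padding bytes after 'c'
//   std::bit_cast<uint64_t>(P{}) is diagnosed: the padding bits are
//   indeterminate; bit_cast'ing to an unsigned char or std::byte
//   destination is allowed instead.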
1974
1975bool GetTypeid(InterpState &S, CodePtr OpPC, const Type *TypePtr,
1976 const Type *TypeInfoType) {
1977 S.Stk.push<Pointer>(Args&: TypePtr, Args&: TypeInfoType);
1978 return true;
1979}
1980
1981bool GetTypeidPtr(InterpState &S, CodePtr OpPC, const Type *TypeInfoType) {
1982 const auto &P = S.Stk.pop<Pointer>();
1983
1984 if (!P.isBlockPointer())
1985 return false;
1986
1987 // Pick the most-derived type.
1988 const Type *T = P.getDeclPtr().getType().getTypePtr();
1989 // ... unless we're currently constructing this object.
1990 // FIXME: We have similar checks to this in other places.
1991 if (S.Current->getFunction()) {
1992 for (const InterpFrame *Frame = S.Current; Frame; Frame = Frame->Caller) {
1993 if (const Function *Func = Frame->getFunction();
1994 Func && (Func->isConstructor() || Func->isDestructor()) &&
1995 P.block() == Frame->getThis().block()) {
1996 T = Func->getParentDecl()->getTypeForDecl();
1997 break;
1998 }
1999 }
2000 }
2001
2002 S.Stk.push<Pointer>(Args: T->getCanonicalTypeUnqualified().getTypePtr(),
2003 Args&: TypeInfoType);
2004 return true;
2005}
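
// E.g. (hypothetical): inside B's constructor, typeid(*this) refers to B
// even if the full object under construction is a D derived from B; the
// frame walk above implements that adjustment.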
2006
2007bool DiagTypeid(InterpState &S, CodePtr OpPC) {
2008 const auto *E = cast<CXXTypeidExpr>(Val: S.Current->getExpr(PC: OpPC));
2009 S.CCEDiag(E, DiagId: diag::note_constexpr_typeid_polymorphic)
2010 << E->getExprOperand()->getType()
2011 << E->getExprOperand()->getSourceRange();
2012 return false;
2013}
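
// E.g. (hypothetical, pre-C++20):
//   struct B { virtual ~B(); };
//   typeid(*bp) for a B *bp needs the dynamic type of the operand and is
//   not permitted in a constant expression before C++20.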
2014
2015bool arePotentiallyOverlappingStringLiterals(const Pointer &LHS,
2016 const Pointer &RHS) {
2017 unsigned LHSOffset = LHS.getIndex();
2018 unsigned RHSOffset = RHS.getIndex();
2019 unsigned LHSLength = (LHS.getNumElems() - 1) * LHS.elemSize();
2020 unsigned RHSLength = (RHS.getNumElems() - 1) * RHS.elemSize();
2021
2022 StringRef LHSStr((const char *)LHS.atIndex(Idx: 0).getRawAddress(), LHSLength);
2023 StringRef RHSStr((const char *)RHS.atIndex(Idx: 0).getRawAddress(), RHSLength);
2024 int32_t IndexDiff = RHSOffset - LHSOffset;
2025 if (IndexDiff < 0) {
2026 if (static_cast<int32_t>(LHSLength) < -IndexDiff)
2027 return false;
2028 LHSStr = LHSStr.drop_front(N: -IndexDiff);
2029 } else {
2030 if (static_cast<int32_t>(RHSLength) < IndexDiff)
2031 return false;
2032 RHSStr = RHSStr.drop_front(N: IndexDiff);
2033 }
2034
2035 unsigned ShorterCharWidth;
2036 StringRef Shorter;
2037 StringRef Longer;
2038 if (LHSLength < RHSLength) {
2039 ShorterCharWidth = LHS.elemSize();
2040 Shorter = LHSStr;
2041 Longer = RHSStr;
2042 } else {
2043 ShorterCharWidth = RHS.elemSize();
2044 Shorter = RHSStr;
2045 Longer = LHSStr;
2046 }
2047
2048 // The null terminator isn't included in the string data, so check for it
2049 // manually. If the longer string doesn't have a null terminator where the
2050 // shorter string ends, they aren't potentially overlapping.
2051 for (unsigned NullByte : llvm::seq(Size: ShorterCharWidth)) {
2052 if (Shorter.size() + NullByte >= Longer.size())
2053 break;
2054 if (Longer[Shorter.size() + NullByte])
2055 return false;
2056 }
2057 return Shorter == Longer.take_front(N: Shorter.size());
2058}
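
// E.g. (hypothetical): comparing &"bar"[0] against &"foobar"[3]. The shorter
// literal plus its null terminator matches the tail of the longer one, so
// the implementation is free to overlap their storage and the two pointers
// may (but need not) compare equal.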
2059
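/// Moves the out-of-line word storage of a multi-word primitive (IntAP,
/// IntAPS, Float) into Program-owned memory, presumably so the value stays
/// valid beyond the current evaluation.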
2060static void copyPrimitiveMemory(InterpState &S, const Pointer &Ptr,
2061 PrimType T) {
2063 if (T == PT_IntAPS) {
2064 auto &Val = Ptr.deref<IntegralAP<true>>();
2065 if (!Val.singleWord()) {
2066 uint64_t *NewMemory = new (S.P) uint64_t[Val.numWords()];
2067 Val.take(NewMemory);
2068 }
2069 } else if (T == PT_IntAP) {
2070 auto &Val = Ptr.deref<IntegralAP<false>>();
2071 if (!Val.singleWord()) {
2072 uint64_t *NewMemory = new (S.P) uint64_t[Val.numWords()];
2073 Val.take(NewMemory);
2074 }
2075 } else if (T == PT_Float) {
2076 auto &Val = Ptr.deref<Floating>();
2077 if (!Val.singleWord()) {
2078 uint64_t *NewMemory = new (S.P) uint64_t[Val.numWords()];
2079 Val.take(NewMemory);
2080 }
2081 }
2082}
2083
2084template <typename T>
2085static void copyPrimitiveMemory(InterpState &S, const Pointer &Ptr) {
2086 assert(needsAlloc<T>());
2087 auto &Val = Ptr.deref<T>();
2088 if (!Val.singleWord()) {
2089 uint64_t *NewMemory = new (S.P) uint64_t[Val.numWords()];
2090 Val.take(NewMemory);
2091 }
2092}
2093
2094static void finishGlobalRecurse(InterpState &S, const Pointer &Ptr) {
2095 if (const Record *R = Ptr.getRecord()) {
2096 for (const Record::Field &Fi : R->fields()) {
2097 if (Fi.Desc->isPrimitive()) {
2098 TYPE_SWITCH_ALLOC(Fi.Desc->getPrimType(), {
2099 copyPrimitiveMemory<T>(S, Ptr.atField(Fi.Offset));
2100 });
2102 } else
2103 finishGlobalRecurse(S, Ptr: Ptr.atField(Off: Fi.Offset));
2104 }
2105 return;
2106 }
2107
2108 if (const Descriptor *D = Ptr.getFieldDesc(); D && D->isArray()) {
2109 unsigned NumElems = D->getNumElems();
2110 if (NumElems == 0)
2111 return;
2112
2113 if (D->isPrimitiveArray()) {
2114 PrimType PT = D->getPrimType();
2115 if (!needsAlloc(T: PT))
2116 return;
2117 assert(NumElems >= 1);
2118 const Pointer EP = Ptr.atIndex(Idx: 0);
2119 bool AllSingleWord = true;
2120 TYPE_SWITCH_ALLOC(PT, {
2121 if (!EP.deref<T>().singleWord()) {
2122 copyPrimitiveMemory<T>(S, EP);
2123 AllSingleWord = false;
2124 }
2125 });
2126 if (AllSingleWord)
2127 return;
2128 for (unsigned I = 1; I != D->getNumElems(); ++I) {
2129 const Pointer EP = Ptr.atIndex(Idx: I);
2130 copyPrimitiveMemory(S, Ptr: EP, T: PT);
2131 }
2132 } else {
2133 assert(D->isCompositeArray());
2134 for (unsigned I = 0; I != D->getNumElems(); ++I) {
2135 const Pointer EP = Ptr.atIndex(Idx: I).narrow();
2136 finishGlobalRecurse(S, Ptr: EP);
2137 }
2138 }
2139 }
2140}
2141
2142bool FinishInitGlobal(InterpState &S, CodePtr OpPC) {
2143 const Pointer &Ptr = S.Stk.pop<Pointer>();
2144
2145 finishGlobalRecurse(S, Ptr);
2146 if (Ptr.canBeInitialized()) {
2147 Ptr.initialize();
2148 Ptr.activate();
2149 }
2150
2151 return true;
2152}
2153
2154// https://github.com/llvm/llvm-project/issues/102513
2155#if defined(_MSC_VER) && !defined(__clang__) && !defined(NDEBUG)
2156#pragma optimize("", off)
2157#endif
2158bool Interpret(InterpState &S) {
2159 // The current stack frame when we started Interpret().
2160 // This is being used by the ops to determine whether
2161 // to return from this function and thus terminate
2162 // interpretation.
2163 const InterpFrame *StartFrame = S.Current;
2164 assert(!S.Current->isRoot());
2165 CodePtr PC = S.Current->getPC();
2166
2167 // Empty program.
2168 if (!PC)
2169 return true;
2170
2171 for (;;) {
2172 auto Op = PC.read<Opcode>();
2173 CodePtr OpPC = PC;
2174
2175 switch (Op) {
2176#define GET_INTERP
2177#include "Opcodes.inc"
2178#undef GET_INTERP
2179 }
2180 }
2181}
2182// https://github.com/llvm/llvm-project/issues/102513
2183#if defined(_MSC_VER) && !defined(__clang__) && !defined(NDEBUG)
2184#pragma optimize("", on)
2185#endif
2186
2187} // namespace interp
2188} // namespace clang
2189