1//===------- Interp.cpp - Interpreter for the constexpr VM ------*- C++ -*-===//
2//
3// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
4// See https://llvm.org/LICENSE.txt for license information.
5// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
6//
7//===----------------------------------------------------------------------===//
8
9#include "Interp.h"
10#include "Compiler.h"
11#include "Function.h"
12#include "InterpFrame.h"
13#include "InterpShared.h"
14#include "InterpStack.h"
15#include "Opcode.h"
16#include "PrimType.h"
17#include "Program.h"
18#include "State.h"
19#include "clang/AST/ASTContext.h"
20#include "clang/AST/CXXInheritance.h"
21#include "clang/AST/DeclObjC.h"
22#include "clang/AST/Expr.h"
23#include "clang/AST/ExprCXX.h"
24#include "clang/Basic/DiagnosticSema.h"
25#include "clang/Basic/TargetInfo.h"
26#include "llvm/ADT/StringExtras.h"
27
28using namespace clang;
29using namespace clang::interp;
30
31#if __has_cpp_attribute(clang::musttail)
32#define MUSTTAIL [[clang::musttail]]
33#elif __has_cpp_attribute(msvc::musttail)
34#define MUSTTAIL [[msvc::musttail]]
35#elif __has_attribute(musttail)
36#define MUSTTAIL __attribute__((musttail))
37#endif
38
39// On MSVC, musttail does not guarantee tail calls in debug mode.
40// We disable it on MSVC generally since it doesn't seem to be able
41// to handle the way we use tailcalls.
42// PPC can't tail-call external calls, which is a problem for InterpNext.
43#if defined(_MSC_VER) || defined(__powerpc__) || !defined(MUSTTAIL) || \
44 defined(__i386__) || defined(__sparc__)
45#undef MUSTTAIL
46#define MUSTTAIL
47#define USE_TAILCALLS 0
48#else
49#define USE_TAILCALLS 1
50#endif
51
/// Handler for the RetValue opcode in contexts where returning a value is
/// impossible; unconditionally aborts the process.
PRESERVE_NONE static bool RetValue(InterpState &S, CodePtr &Ptr) {
  llvm::report_fatal_error(reason: "Interpreter cannot return values");
}
55
56//===----------------------------------------------------------------------===//
57// Jmp, Jt, Jf
58//===----------------------------------------------------------------------===//
59
/// Unconditional jump: advances PC by Offset and charges one interpreter
/// step (noteStep may fail, e.g. on step-limit exhaustion).
static bool Jmp(InterpState &S, CodePtr &PC, int32_t Offset) {
  PC += Offset;
  return S.noteStep(OpPC: PC);
}
64
65static bool Jt(InterpState &S, CodePtr &PC, int32_t Offset) {
66 if (S.Stk.pop<bool>()) {
67 PC += Offset;
68 }
69 return S.noteStep(OpPC: PC);
70}
71
72static bool Jf(InterpState &S, CodePtr &PC, int32_t Offset) {
73 if (!S.Stk.pop<bool>()) {
74 PC += Offset;
75 }
76 return S.noteStep(OpPC: PC);
77}
78
/// Emit a note that the value of \p VD cannot be used because its
/// initializer is unknown, and point at the declaration.
static void diagnoseMissingInitializer(InterpState &S, CodePtr OpPC,
                                       const ValueDecl *VD) {
  const SourceInfo &E = S.Current->getSource(PC: OpPC);
  S.FFDiag(SI: E, DiagId: diag::note_constexpr_var_init_unknown, ExtraNotes: 1) << VD;
  S.Note(Loc: VD->getLocation(), DiagId: diag::note_declared_at) << VD->getSourceRange();
}
85
86static void diagnoseNonConstVariable(InterpState &S, CodePtr OpPC,
87 const ValueDecl *VD);
/// Diagnose a use of declaration \p D whose value is unknown to the
/// evaluator (function parameters, variables without usable initializers,
/// non-const variables). Always returns false.
static bool diagnoseUnknownDecl(InterpState &S, CodePtr OpPC,
                                const ValueDecl *D) {
  // This function tries pretty hard to produce a good diagnostic. Just skip
  // that if nobody will see it anyway.
  if (!S.diagnosing())
    return false;

  if (isa<ParmVarDecl>(Val: D)) {
    // Reference parameters in pre-C++11 constant contexts get the generic
    // non-const-variable diagnostic instead of the parameter-specific one.
    if (D->getType()->isReferenceType()) {
      if (S.inConstantContext() && S.getLangOpts().CPlusPlus &&
          !S.getLangOpts().CPlusPlus11) {
        diagnoseNonConstVariable(S, OpPC, VD: D);
        return false;
      }
    }

    const SourceInfo &Loc = S.Current->getSource(PC: OpPC);
    if (S.getLangOpts().CPlusPlus23 && D->getType()->isReferenceType()) {
      S.FFDiag(SI: Loc, DiagId: diag::note_constexpr_access_unknown_variable, ExtraNotes: 1)
          << AK_Read << D;
      S.Note(Loc: D->getLocation(), DiagId: diag::note_declared_at) << D->getSourceRange();
    } else if (S.getLangOpts().CPlusPlus11) {
      S.FFDiag(SI: Loc, DiagId: diag::note_constexpr_function_param_value_unknown, ExtraNotes: 1) << D;
      S.Note(Loc: D->getLocation(), DiagId: diag::note_declared_at) << D->getSourceRange();
    } else {
      // Pre-C++11 (or C): generic "invalid subexpression" diagnostic.
      S.FFDiag(SI: Loc);
    }
    return false;
  }

  if (!D->getType().isConstQualified()) {
    diagnoseNonConstVariable(S, OpPC, VD: D);
  } else if (const auto *VD = dyn_cast<VarDecl>(Val: D)) {
    if (!VD->getAnyInitializer()) {
      diagnoseMissingInitializer(S, OpPC, VD);
    } else {
      // Const variable with an initializer that is nevertheless not a
      // constant expression.
      const SourceInfo &Loc = S.Current->getSource(PC: OpPC);
      S.FFDiag(SI: Loc, DiagId: diag::note_constexpr_var_init_non_constant, ExtraNotes: 1) << VD;
      S.Note(Loc: VD->getLocation(), DiagId: diag::note_declared_at);
    }
  }

  return false;
}
132
/// Emit the appropriate note for reading/writing the non-constant variable
/// \p VD, choosing the diagnostic based on language mode and the variable's
/// type.
static void diagnoseNonConstVariable(InterpState &S, CodePtr OpPC,
                                     const ValueDecl *VD) {
  if (!S.diagnosing())
    return;

  const SourceInfo &Loc = S.Current->getSource(PC: OpPC);
  if (!S.getLangOpts().CPlusPlus) {
    // C: generic "invalid subexpression" diagnostic.
    S.FFDiag(SI: Loc);
    return;
  }

  // A const variable without any initializer gets the missing-initializer
  // note instead.
  if (const auto *VarD = dyn_cast<VarDecl>(Val: VD);
      VarD && VarD->getType().isConstQualified() &&
      !VarD->getAnyInitializer()) {
    diagnoseMissingInitializer(S, OpPC, VD);
    return;
  }

  // Rather random, but this is to match the diagnostic output of the current
  // interpreter.
  if (isa<ObjCIvarDecl>(Val: VD))
    return;

  if (VD->getType()->isIntegralOrEnumerationType()) {
    S.FFDiag(SI: Loc, DiagId: diag::note_constexpr_ltor_non_const_int, ExtraNotes: 1) << VD;
    S.Note(Loc: VD->getLocation(), DiagId: diag::note_declared_at);
    return;
  }

  S.FFDiag(SI: Loc,
           DiagId: S.getLangOpts().CPlusPlus11 ? diag::note_constexpr_ltor_non_constexpr
                                        : diag::note_constexpr_ltor_non_integral,
           ExtraNotes: 1)
      << VD << VD->getType();
  S.Note(Loc: VD->getLocation(), DiagId: diag::note_declared_at);
}
169
/// Check an access to a block that may be a lifetime-extended static
/// temporary (from a MaterializeTemporaryExpr). Such a temporary may only
/// be accessed if it was created during this evaluation or is usable in
/// constant expressions.
static bool CheckTemporary(InterpState &S, CodePtr OpPC, const Block *B,
                           AccessKinds AK) {
  if (B->getDeclID()) {
    // Only static temporaries are of interest here.
    if (!(B->isStatic() && B->isTemporary()))
      return true;

    const auto *MTE = dyn_cast_if_present<MaterializeTemporaryExpr>(
        Val: B->getDescriptor()->asExpr());
    if (!MTE)
      return true;

    // FIXME(perf): Since we do this check on every Load from a static
    // temporary, it might make sense to cache the value of the
    // isUsableInConstantExpressions call.
    if (B->getEvalID() != S.EvalID &&
        !MTE->isUsableInConstantExpressions(Context: S.getASTContext())) {
      const SourceInfo &E = S.Current->getSource(PC: OpPC);
      S.FFDiag(SI: E, DiagId: diag::note_constexpr_access_static_temporary, ExtraNotes: 1) << AK;
      S.Note(Loc: B->getDescriptor()->getLocation(),
             DiagId: diag::note_constexpr_temporary_here);
      return false;
    }
  }
  return true;
}
195
196static bool CheckGlobal(InterpState &S, CodePtr OpPC, const Pointer &Ptr) {
197 if (auto ID = Ptr.getDeclID()) {
198 if (!Ptr.isStatic())
199 return true;
200
201 if (S.P.getCurrentDecl() == ID)
202 return true;
203
204 S.FFDiag(Loc: S.Current->getLocation(PC: OpPC), DiagId: diag::note_constexpr_modify_global);
205 return false;
206 }
207 return true;
208}
209
210namespace clang {
211namespace interp {
212PRESERVE_NONE static bool BCP(InterpState &S, CodePtr &RealPC, int32_t Offset,
213 PrimType PT);
214
/// Discard one call argument from the interpreter stack, using the
/// classified primitive type of \p Arg (pointers as the fallback).
static void popArg(InterpState &S, const Expr *Arg) {
  PrimType Ty = S.getContext().classify(E: Arg).value_or(PT: PT_Ptr);
  TYPE_SWITCH(Ty, S.Stk.discard<T>());
}
219
/// Pop everything a call pushed onto the interpreter stack after the call
/// finished: variadic extra arguments first (in reverse push order), then
/// the fixed parameters, then the implicit this/RVO pointers.
void cleanupAfterFunctionCall(InterpState &S, CodePtr OpPC,
                              const Function *Func) {
  assert(S.Current);
  assert(Func);

  if (S.Current->Caller && Func->isVariadic()) {
    // CallExpr we're look for is at the return PC of the current function, i.e.
    // in the caller.
    // This code path should be executed very rarely.
    unsigned NumVarArgs;
    const Expr *const *Args = nullptr;
    unsigned NumArgs = 0;
    const Expr *CallSite = S.Current->Caller->getExpr(PC: S.Current->getRetPC());
    if (const auto *CE = dyn_cast<CallExpr>(Val: CallSite)) {
      Args = CE->getArgs();
      NumArgs = CE->getNumArgs();
    } else if (const auto *CE = dyn_cast<CXXConstructExpr>(Val: CallSite)) {
      Args = CE->getArgs();
      NumArgs = CE->getNumArgs();
    } else
      assert(false && "Can't get arguments from that expression type");

    assert(NumArgs >= Func->getNumWrittenParams());
    // For operator calls, the first written argument is the object argument
    // and does not count as a variadic one.
    NumVarArgs = NumArgs - (Func->getNumWrittenParams() +
                            isa<CXXOperatorCallExpr>(Val: CallSite));
    // Pop the variadic arguments in reverse order of how they were pushed.
    for (unsigned I = 0; I != NumVarArgs; ++I) {
      const Expr *A = Args[NumArgs - 1 - I];
      popArg(S, Arg: A);
    }
  }

  // And in any case, remove the fixed parameters (the non-variadic ones)
  // at the end.
  for (const Function::ParamDescriptor &PDesc : Func->args_reverse())
    TYPE_SWITCH(PDesc.T, S.Stk.discard<T>());

  if (Func->hasThisPointer() && !Func->isThisPointerExplicit())
    S.Stk.discard<Pointer>();
  if (Func->hasRVO())
    S.Stk.discard<Pointer>();
}
261
262bool isConstexprUnknown(const Pointer &P) {
263 if (!P.isBlockPointer())
264 return false;
265
266 if (P.isDummy())
267 return isa_and_nonnull<ParmVarDecl>(Val: P.getDeclDesc()->asValueDecl());
268
269 return P.getDeclDesc()->IsConstexprUnknown;
270}
271
/// Decide whether the pointer \p Ptr is an acceptable result of a BCP
/// (builtin constant evaluation) region. The order of the early-outs is
/// significant: dummies are rejected before the null check.
bool CheckBCPResult(InterpState &S, const Pointer &Ptr) {
  if (Ptr.isDummy())
    return false;
  if (Ptr.isZero())
    return true;
  if (Ptr.isFunctionPointer())
    return false;
  if (Ptr.isIntegralPointer())
    return true;
  if (Ptr.isTypeidPointer())
    return true;

  if (Ptr.getType()->isAnyComplexType())
    return true;

  // A pointer to the start of a string literal is also acceptable.
  if (const Expr *Base = Ptr.getDeclDesc()->asExpr())
    return isa<StringLiteral>(Val: Base) && Ptr.getIndex() == 0;
  return false;
}
291
/// Check that \p Ptr refers to the active member of every union on its
/// path. On failure, diagnose which inactive member was accessed and which
/// member (if any) is actually active. \p WillActivate indicates that the
/// caller is about to activate the pointee (e.g. for an assignment).
bool CheckActive(InterpState &S, CodePtr OpPC, const Pointer &Ptr,
                 AccessKinds AK, bool WillActivate) {
  if (Ptr.isActive())
    return true;

  assert(Ptr.inUnion());

  // Find the outermost union.
  Pointer U = Ptr.getBase();
  Pointer C = Ptr;
  while (!U.isRoot() && !U.isActive()) {
    // A little arbitrary, but this is what the current interpreter does.
    // See the AnonymousUnion test in test/AST/ByteCode/unions.cpp.
    // GCC's output is more similar to what we would get without
    // this condition.
    if (U.getRecord() && U.getRecord()->isAnonymousUnion())
      break;

    C = U;
    U = U.getBase();
  }
  assert(C.isField());
  assert(C.getBase() == U);

  // Consider:
  // union U {
  //   struct {
  //     int x;
  //     int y;
  //   } a;
  // }
  //
  // When activating x, we will also activate a. If we now try to read
  // from y, we will get to CheckActive, because y is not active. In that
  // case, our U will be a (not a union). We return here and let later code
  // handle this.
  if (!U.getFieldDesc()->isUnion())
    return true;

  // When we will activate Ptr, check that none of the unions in its path have a
  // non-trivial default constructor.
  if (WillActivate) {
    bool Fails = false;
    Pointer It = Ptr;
    while (!It.isRoot() && !It.isActive()) {
      if (const Record *R = It.getRecord(); R && R->isUnion()) {
        if (const auto *CXXRD = dyn_cast<CXXRecordDecl>(Val: R->getDecl());
            CXXRD && !CXXRD->hasTrivialDefaultConstructor()) {
          Fails = true;
          break;
        }
      }
      It = It.getBase();
    }
    // All unions on the path are trivially default-constructible, so the
    // activation is fine without a diagnostic.
    if (!Fails)
      return true;
  }

  // Get the inactive field descriptor.
  assert(!C.isActive());
  const FieldDecl *InactiveField = C.getField();
  assert(InactiveField);

  // Find the active field of the union.
  const Record *R = U.getRecord();
  assert(R && R->isUnion() && "Not a union");

  const FieldDecl *ActiveField = nullptr;
  for (const Record::Field &F : R->fields()) {
    const Pointer &Field = U.atField(Off: F.Offset);
    if (Field.isActive()) {
      ActiveField = Field.getField();
      break;
    }
  }

  const SourceInfo &Loc = S.Current->getSource(PC: OpPC);
  S.FFDiag(SI: Loc, DiagId: diag::note_constexpr_access_inactive_union_member)
      << AK << InactiveField << !ActiveField << ActiveField;
  return false;
}
373
/// Check that \p Ptr does not refer to an extern variable whose value is
/// unavailable; diagnoses via diagnoseNonConstVariable on failure.
bool CheckExtern(InterpState &S, CodePtr OpPC, const Pointer &Ptr) {
  if (!Ptr.isExtern())
    return true;

  // An extern variable is fine if it is initialized or is the very
  // declaration we are currently evaluating.
  if (!Ptr.isPastEnd() &&
      (Ptr.isInitialized() ||
       (Ptr.getDeclDesc()->asVarDecl() == S.EvaluatingDecl)))
    return true;

  // While only checking for a potential constant expression, fail silently
  // for const externs in C++.
  if (S.checkingPotentialConstantExpression() && S.getLangOpts().CPlusPlus &&
      Ptr.isConst())
    return false;

  const auto *VD = Ptr.getDeclDesc()->asValueDecl();
  diagnoseNonConstVariable(S, OpPC, VD);
  return false;
}
391
392bool CheckArray(InterpState &S, CodePtr OpPC, const Pointer &Ptr) {
393 if (!Ptr.isUnknownSizeArray())
394 return true;
395 const SourceInfo &E = S.Current->getSource(PC: OpPC);
396 S.FFDiag(SI: E, DiagId: diag::note_constexpr_unsized_array_indexed);
397 return false;
398}
399
/// Check that \p Ptr refers to a live object: not null and not pointing to
/// a destroyed/deleted or otherwise out-of-lifetime object.
bool CheckLive(InterpState &S, CodePtr OpPC, const Pointer &Ptr,
               AccessKinds AK) {
  if (Ptr.isZero()) {
    const auto &Src = S.Current->getSource(PC: OpPC);

    // Distinguish a null subobject access from a plain null access.
    if (Ptr.isField())
      S.FFDiag(SI: Src, DiagId: diag::note_constexpr_null_subobject) << CSK_Field;
    else
      S.FFDiag(SI: Src, DiagId: diag::note_constexpr_access_null) << AK;

    return false;
  }

  if (!Ptr.isLive()) {
    const auto &Src = S.Current->getSource(PC: OpPC);

    if (Ptr.isDynamic()) {
      // Heap allocation that has already been deleted.
      S.FFDiag(SI: Src, DiagId: diag::note_constexpr_access_deleted_object) << AK;
    } else if (!S.checkingPotentialConstantExpression()) {
      bool IsTemp = Ptr.isTemporary();
      S.FFDiag(SI: Src, DiagId: diag::note_constexpr_access_uninit)
          << AK << /*uninitialized=*/false << S.Current->getRange(PC: OpPC);

      if (IsTemp)
        S.Note(Loc: Ptr.getDeclLoc(), DiagId: diag::note_constexpr_temporary_here);
      else
        S.Note(Loc: Ptr.getDeclLoc(), DiagId: diag::note_declared_at);
    }

    return false;
  }

  return true;
}
434
/// Check that the variable described by \p Desc may be read in a constant
/// expression (it is constexpr, the declaration being evaluated, or has a
/// suitably constant type). Emits the appropriate diagnostic otherwise.
bool CheckConstant(InterpState &S, CodePtr OpPC, const Descriptor *Desc) {
  assert(Desc);

  const auto *D = Desc->asVarDecl();
  if (!D || D == S.EvaluatingDecl || D->isConstexpr())
    return true;

  // If we're evaluating the initializer for a constexpr variable in C23, we may
  // only read other contexpr variables. Abort here since this one isn't
  // constexpr.
  if (const auto *VD = dyn_cast_if_present<VarDecl>(Val: S.EvaluatingDecl);
      VD && VD->isConstexpr() && S.getLangOpts().C23)
    return Invalid(S, OpPC);

  QualType T = D->getType();
  bool IsConstant = T.isConstant(Ctx: S.getASTContext());
  if (T->isIntegralOrEnumerationType()) {
    if (!IsConstant) {
      diagnoseNonConstVariable(S, OpPC, VD: D);
      return false;
    }
    return true;
  }

  // Constant but not constexpr: only a CCE diagnostic, access still allowed.
  if (IsConstant) {
    if (S.getLangOpts().CPlusPlus) {
      S.CCEDiag(Loc: S.Current->getLocation(PC: OpPC),
                DiagId: S.getLangOpts().CPlusPlus11
                    ? diag::note_constexpr_ltor_non_constexpr
                    : diag::note_constexpr_ltor_non_integral,
                ExtraNotes: 1)
          << D << T;
      S.Note(Loc: D->getLocation(), DiagId: diag::note_declared_at);
    } else {
      S.CCEDiag(Loc: S.Current->getLocation(PC: OpPC));
    }
    return true;
  }

  // Pointers/references to const pointees are fine from C++11 on.
  if (T->isPointerOrReferenceType()) {
    if (!T->getPointeeType().isConstant(Ctx: S.getASTContext()) ||
        !S.getLangOpts().CPlusPlus11) {
      diagnoseNonConstVariable(S, OpPC, VD: D);
      return false;
    }
    return true;
  }

  diagnoseNonConstVariable(S, OpPC, VD: D);
  return false;
}
486
487static bool CheckConstant(InterpState &S, CodePtr OpPC, const Pointer &Ptr) {
488 if (!Ptr.isStatic() || !Ptr.isBlockPointer())
489 return true;
490 if (!Ptr.getDeclID())
491 return true;
492 return CheckConstant(S, OpPC, Desc: Ptr.getDeclDesc());
493}
494
495bool CheckNull(InterpState &S, CodePtr OpPC, const Pointer &Ptr,
496 CheckSubobjectKind CSK) {
497 if (!Ptr.isZero())
498 return true;
499 const SourceInfo &Loc = S.Current->getSource(PC: OpPC);
500 S.FFDiag(SI: Loc, DiagId: diag::note_constexpr_null_subobject)
501 << CSK << S.Current->getRange(PC: OpPC);
502
503 return false;
504}
505
506bool CheckRange(InterpState &S, CodePtr OpPC, const Pointer &Ptr,
507 AccessKinds AK) {
508 if (!Ptr.isOnePastEnd() && !Ptr.isZeroSizeArray())
509 return true;
510 if (S.getLangOpts().CPlusPlus) {
511 const SourceInfo &Loc = S.Current->getSource(PC: OpPC);
512 S.FFDiag(SI: Loc, DiagId: diag::note_constexpr_access_past_end)
513 << AK << S.Current->getRange(PC: OpPC);
514 }
515 return false;
516}
517
/// Diagnose forming a subobject (of kind \p CSK) of a past-the-end array
/// element or a zero-sized array.
bool CheckRange(InterpState &S, CodePtr OpPC, const Pointer &Ptr,
                CheckSubobjectKind CSK) {
  if (!Ptr.isElementPastEnd() && !Ptr.isZeroSizeArray())
    return true;
  const SourceInfo &Loc = S.Current->getSource(PC: OpPC);
  S.FFDiag(SI: Loc, DiagId: diag::note_constexpr_past_end_subobject)
      << CSK << S.Current->getRange(PC: OpPC);
  return false;
}
527
528bool CheckSubobject(InterpState &S, CodePtr OpPC, const Pointer &Ptr,
529 CheckSubobjectKind CSK) {
530 if (!Ptr.isOnePastEnd())
531 return true;
532
533 const SourceInfo &Loc = S.Current->getSource(PC: OpPC);
534 S.FFDiag(SI: Loc, DiagId: diag::note_constexpr_past_end_subobject)
535 << CSK << S.Current->getRange(PC: OpPC);
536 return false;
537}
538
/// Check that subtracting \p Offset from the pointer's byte offset (for a
/// derived-class cast) stays within the object, i.e. does not go below the
/// block's metadata region.
bool CheckDowncast(InterpState &S, CodePtr OpPC, const Pointer &Ptr,
                   uint32_t Offset) {
  uint32_t MinOffset = Ptr.getDeclDesc()->getMetadataSize();
  uint32_t PtrOffset = Ptr.getByteOffset();

  // We subtract Offset from PtrOffset. The result must be at least
  // MinOffset.
  if (Offset < PtrOffset && (PtrOffset - Offset) >= MinOffset)
    return true;

  // Invalid downcast: report the most-derived type vs. the cast target.
  const auto *E = cast<CastExpr>(Val: S.Current->getExpr(PC: OpPC));
  QualType TargetQT = E->getType()->getPointeeType();
  QualType MostDerivedQT = Ptr.getDeclPtr().getType();

  S.CCEDiag(E, DiagId: diag::note_constexpr_invalid_downcast)
      << MostDerivedQT << TargetQT;

  return false;
}
558
/// Check that the const-qualified object \p Ptr points to may be written
/// (e.g. because we are currently inside its constructor/destructor).
bool CheckConst(InterpState &S, CodePtr OpPC, const Pointer &Ptr) {
  assert(Ptr.isLive() && "Pointer is not live");
  if (!Ptr.isConst())
    return true;

  // Mutable members are writable unless the const-ness comes from a const
  // member inside the mutable one.
  if (Ptr.isMutable() && !Ptr.isConstInMutable())
    return true;

  // Non-block pointers fail silently.
  if (!Ptr.isBlockPointer())
    return false;

  // The This pointer is writable in constructors and destructors,
  // even if isConst() returns true.
  if (llvm::is_contained(Range&: S.InitializingBlocks, Element: Ptr.block()))
    return true;

  const QualType Ty = Ptr.getType();
  const SourceInfo &Loc = S.Current->getSource(PC: OpPC);
  S.FFDiag(SI: Loc, DiagId: diag::note_constexpr_modify_const_type) << Ty;
  return false;
}
580
/// Check that reading a mutable member through \p Ptr is permitted.
bool CheckMutable(InterpState &S, CodePtr OpPC, const Pointer &Ptr) {
  assert(Ptr.isLive() && "Pointer is not live");
  if (!Ptr.isMutable())
    return true;

  // In C++14 onwards, it is permitted to read a mutable member whose
  // lifetime began within the evaluation.
  if (S.getLangOpts().CPlusPlus14 && Ptr.block()->getEvalID() == S.EvalID)
    return true;

  const SourceInfo &Loc = S.Current->getSource(PC: OpPC);
  const FieldDecl *Field = Ptr.getField();
  S.FFDiag(SI: Loc, DiagId: diag::note_constexpr_access_mutable, ExtraNotes: 1) << AK_Read << Field;
  S.Note(Loc: Field->getLocation(), DiagId: diag::note_declared_at);
  return false;
}
597
/// Check an access (of kind \p AK) to a volatile object and diagnose it,
/// pointing at the field, variable, or expression responsible for the
/// volatile qualification.
static bool CheckVolatile(InterpState &S, CodePtr OpPC, const Pointer &Ptr,
                          AccessKinds AK) {
  assert(Ptr.isLive());

  if (!Ptr.isVolatile())
    return true;

  // In C, a volatile access is a generic invalid subexpression.
  if (!S.getLangOpts().CPlusPlus)
    return Invalid(S, OpPC);

  // Volatile object can be written-to and read if they are being constructed.
  if (llvm::is_contained(Range&: S.InitializingBlocks, Element: Ptr.block()))
    return true;

  // The reason why Ptr is volatile might be further up the hierarchy.
  // Find that pointer.
  Pointer P = Ptr;
  while (!P.isRoot()) {
    if (P.getType().isVolatileQualified())
      break;
    P = P.getBase();
  }

  // DiagKind selects the phrasing: 0 = expression, 1 = variable, 2 = field.
  const NamedDecl *ND = nullptr;
  int DiagKind;
  SourceLocation Loc;
  if (const auto *F = P.getField()) {
    DiagKind = 2;
    Loc = F->getLocation();
    ND = F;
  } else if (auto *VD = P.getFieldDesc()->asValueDecl()) {
    DiagKind = 1;
    Loc = VD->getLocation();
    ND = VD;
  } else {
    DiagKind = 0;
    if (const auto *E = P.getFieldDesc()->asExpr())
      Loc = E->getExprLoc();
  }

  S.FFDiag(Loc: S.Current->getLocation(PC: OpPC),
           DiagId: diag::note_constexpr_access_volatile_obj, ExtraNotes: 1)
      << AK << DiagKind << ND;
  S.Note(Loc, DiagId: diag::note_constexpr_volatile_here) << DiagKind;
  return false;
}
644
/// Pointer overload: forward to the descriptor-based overload, extracting
/// extern-ness and the declaration descriptor from \p Ptr.
bool DiagnoseUninitialized(InterpState &S, CodePtr OpPC, const Pointer &Ptr,
                           AccessKinds AK) {
  assert(Ptr.isLive());
  assert(!Ptr.isInitialized());
  return DiagnoseUninitialized(S, OpPC, Extern: Ptr.isExtern(), Desc: Ptr.getDeclDesc(), AK);
}
651
/// Diagnose a read from an uninitialized object, picking the most specific
/// note available (non-constant initializer, missing initializer, access
/// outside lifetime, or plain uninitialized read). Always returns false.
bool DiagnoseUninitialized(InterpState &S, CodePtr OpPC, bool Extern,
                           const Descriptor *Desc, AccessKinds AK) {
  // Extern objects fail silently while only probing for a potential
  // constant expression.
  if (Extern && S.checkingPotentialConstantExpression())
    return false;

  if (const auto *VD = Desc->asVarDecl();
      VD && (VD->isConstexpr() || VD->hasGlobalStorage())) {

    // Reading the very variable we are initializing.
    if (VD == S.EvaluatingDecl &&
        !(S.getLangOpts().CPlusPlus23 && VD->getType()->isReferenceType())) {
      if (!S.getLangOpts().CPlusPlus14 &&
          !VD->getType().isConstant(Ctx: S.getASTContext())) {
        // Diagnose as non-const read.
        diagnoseNonConstVariable(S, OpPC, VD);
      } else {
        const SourceInfo &Loc = S.Current->getSource(PC: OpPC);
        // Diagnose as "read of object outside its lifetime".
        S.FFDiag(SI: Loc, DiagId: diag::note_constexpr_access_uninit)
            << AK << /*IsIndeterminate=*/false;
      }
      return false;
    }

    if (VD->getAnyInitializer()) {
      // Has an initializer, but it was not a constant expression.
      const SourceInfo &Loc = S.Current->getSource(PC: OpPC);
      S.FFDiag(SI: Loc, DiagId: diag::note_constexpr_var_init_non_constant, ExtraNotes: 1) << VD;
      S.Note(Loc: VD->getLocation(), DiagId: diag::note_declared_at);
    } else {
      diagnoseMissingInitializer(S, OpPC, VD);
    }
    return false;
  }

  // Generic uninitialized-read note.
  if (!S.checkingPotentialConstantExpression()) {
    S.FFDiag(SI: S.Current->getSource(PC: OpPC), DiagId: diag::note_constexpr_access_uninit)
        << AK << /*uninitialized=*/true << S.Current->getRange(PC: OpPC);
  }
  return false;
}
691
692static bool CheckLifetime(InterpState &S, CodePtr OpPC, Lifetime LT,
693 AccessKinds AK) {
694 if (LT == Lifetime::Started)
695 return true;
696
697 if (!S.checkingPotentialConstantExpression()) {
698 S.FFDiag(SI: S.Current->getSource(PC: OpPC), DiagId: diag::note_constexpr_access_uninit)
699 << AK << /*uninitialized=*/false << S.Current->getRange(PC: OpPC);
700 }
701 return false;
702}
703
/// Weak declarations may be overridden at link time, so their value is
/// never usable in a constant expression; diagnose the access.
static bool CheckWeak(InterpState &S, CodePtr OpPC, const Block *B) {
  if (!B->isWeak())
    return true;

  const auto *VD = B->getDescriptor()->asVarDecl();
  assert(VD);
  S.FFDiag(Loc: S.Current->getLocation(PC: OpPC), DiagId: diag::note_constexpr_var_init_weak)
      << VD;
  S.Note(Loc: VD->getLocation(), DiagId: diag::note_declared_at);

  return false;
}
716
// The list of checks here is just the one from CheckLoad, but with the
// ones removed that are impossible on primitive global values.
// For example, since those can't be members of structs, they also can't
// be mutable.
bool CheckGlobalLoad(InterpState &S, CodePtr OpPC, const Block *B) {
  const auto &Desc = B->getBlockDesc<GlobalInlineDescriptor>();
  // Inaccessible blocks (extern/dummy/weak) get the cheaper failure path.
  if (!B->isAccessible()) {
    if (!CheckExtern(S, OpPC, Ptr: Pointer(const_cast<Block *>(B))))
      return false;
    if (!CheckDummy(S, OpPC, B, AK: AK_Read))
      return false;
    return CheckWeak(S, OpPC, B);
  }

  if (!CheckConstant(S, OpPC, Desc: B->getDescriptor()))
    return false;
  if (Desc.InitState != GlobalInitState::Initialized)
    return DiagnoseUninitialized(S, OpPC, Extern: B->isExtern(), Desc: B->getDescriptor(),
                                 AK: AK_Read);
  if (!CheckTemporary(S, OpPC, B, AK: AK_Read))
    return false;
  // Volatile globals: diagnose inline (DiagKind 1 == variable).
  if (B->getDescriptor()->IsVolatile) {
    if (!S.getLangOpts().CPlusPlus)
      return Invalid(S, OpPC);

    const ValueDecl *D = B->getDescriptor()->asValueDecl();
    S.FFDiag(Loc: S.Current->getLocation(PC: OpPC),
             DiagId: diag::note_constexpr_access_volatile_obj, ExtraNotes: 1)
        << AK_Read << 1 << D;
    S.Note(Loc: D->getLocation(), DiagId: diag::note_constexpr_volatile_here) << 1;
    return false;
  }
  return true;
}
751
// Similarly, for local loads: the reduced check list from CheckLoad for
// primitive local values.
bool CheckLocalLoad(InterpState &S, CodePtr OpPC, const Block *B) {
  assert(!B->isExtern());
  const auto &Desc = *reinterpret_cast<const InlineDescriptor *>(B->rawData());
  if (!CheckLifetime(S, OpPC, LT: Desc.LifeState, AK: AK_Read))
    return false;
  if (!Desc.IsInitialized)
    return DiagnoseUninitialized(S, OpPC, /*Extern=*/false, Desc: B->getDescriptor(),
                                 AK: AK_Read);
  // Volatile locals: diagnose inline (DiagKind 1 == variable).
  if (B->getDescriptor()->IsVolatile) {
    if (!S.getLangOpts().CPlusPlus)
      return Invalid(S, OpPC);

    const ValueDecl *D = B->getDescriptor()->asValueDecl();
    S.FFDiag(Loc: S.Current->getLocation(PC: OpPC),
             DiagId: diag::note_constexpr_access_volatile_obj, ExtraNotes: 1)
        << AK_Read << 1 << D;
    S.Note(Loc: D->getLocation(), DiagId: diag::note_constexpr_volatile_here) << 1;
    return false;
  }
  return true;
}
774
/// Run the full battery of checks required before reading through \p Ptr:
/// null, liveness, extern/dummy/weak, constness, range, active union
/// member, lifetime, initialization, temporaries, mutability, volatility.
bool CheckLoad(InterpState &S, CodePtr OpPC, const Pointer &Ptr,
               AccessKinds AK) {
  if (Ptr.isZero()) {
    const auto &Src = S.Current->getSource(PC: OpPC);

    if (Ptr.isField())
      S.FFDiag(SI: Src, DiagId: diag::note_constexpr_null_subobject) << CSK_Field;
    else
      S.FFDiag(SI: Src, DiagId: diag::note_constexpr_access_null) << AK;
    return false;
  }
  // Block pointers are the only ones we can actually read from.
  if (!Ptr.isBlockPointer())
    return false;

  // Inaccessible blocks (extern/dummy/weak) get the cheaper failure path.
  if (!Ptr.block()->isAccessible()) {
    if (!CheckLive(S, OpPC, Ptr, AK))
      return false;
    if (!CheckExtern(S, OpPC, Ptr))
      return false;
    if (!CheckDummy(S, OpPC, B: Ptr.block(), AK))
      return false;
    return CheckWeak(S, OpPC, B: Ptr.block());
  }

  if (!CheckConstant(S, OpPC, Ptr))
    return false;
  if (!CheckRange(S, OpPC, Ptr, AK))
    return false;
  if (!CheckActive(S, OpPC, Ptr, AK))
    return false;
  if (!CheckLifetime(S, OpPC, LT: Ptr.getLifetime(), AK))
    return false;
  if (!Ptr.isInitialized())
    return DiagnoseUninitialized(S, OpPC, Ptr, AK);
  if (!CheckTemporary(S, OpPC, B: Ptr.block(), AK))
    return false;

  if (!CheckMutable(S, OpPC, Ptr))
    return false;
  if (!CheckVolatile(S, OpPC, Ptr, AK))
    return false;
  // Constexpr-unknown values fail silently outside constant contexts.
  if (!Ptr.isConst() && !S.inConstantContext() && isConstexprUnknown(P: Ptr))
    return false;
  return true;
}
821
/// This is not used by any of the opcodes directly. It's used by
/// EvalEmitter to do the final lvalue-to-rvalue conversion.
/// Like CheckLoad, but without the null, range, volatility and
/// constexpr-unknown checks.
bool CheckFinalLoad(InterpState &S, CodePtr OpPC, const Pointer &Ptr) {
  assert(!Ptr.isZero());
  if (!Ptr.isBlockPointer())
    return false;

  // Inaccessible blocks (extern/dummy/weak) get the cheaper failure path.
  if (!Ptr.block()->isAccessible()) {
    if (!CheckLive(S, OpPC, Ptr, AK: AK_Read))
      return false;
    if (!CheckExtern(S, OpPC, Ptr))
      return false;
    if (!CheckDummy(S, OpPC, B: Ptr.block(), AK: AK_Read))
      return false;
    return CheckWeak(S, OpPC, B: Ptr.block());
  }

  if (!CheckConstant(S, OpPC, Ptr))
    return false;

  if (!CheckActive(S, OpPC, Ptr, AK: AK_Read))
    return false;
  if (!CheckLifetime(S, OpPC, LT: Ptr.getLifetime(), AK: AK_Read))
    return false;
  if (!Ptr.isInitialized())
    return DiagnoseUninitialized(S, OpPC, Ptr, AK: AK_Read);
  if (!CheckTemporary(S, OpPC, B: Ptr.block(), AK: AK_Read))
    return false;
  if (!CheckMutable(S, OpPC, Ptr))
    return false;
  return true;
}
854
/// Run the checks required before writing through \p Ptr: liveness,
/// extern/dummy, lifetime, range, active union member (\p WillBeActivated
/// indicates the store activates the member), globals, constness,
/// volatility, and constexpr-unknown-ness.
bool CheckStore(InterpState &S, CodePtr OpPC, const Pointer &Ptr,
                bool WillBeActivated) {
  if (!Ptr.isBlockPointer() || Ptr.isZero())
    return false;

  // Inaccessible blocks (extern/dummy) get the cheaper failure path.
  if (!Ptr.block()->isAccessible()) {
    if (!CheckLive(S, OpPC, Ptr, AK: AK_Assign))
      return false;
    if (!CheckExtern(S, OpPC, Ptr))
      return false;
    return CheckDummy(S, OpPC, B: Ptr.block(), AK: AK_Assign);
  }
  if (!CheckLifetime(S, OpPC, LT: Ptr.getLifetime(), AK: AK_Assign))
    return false;
  if (!CheckRange(S, OpPC, Ptr, AK: AK_Assign))
    return false;
  if (!CheckActive(S, OpPC, Ptr, AK: AK_Assign, WillActivate: WillBeActivated))
    return false;
  if (!CheckGlobal(S, OpPC, Ptr))
    return false;
  if (!CheckConst(S, OpPC, Ptr))
    return false;
  if (!CheckVolatile(S, OpPC, Ptr, AK: AK_Assign))
    return false;
  // Constexpr-unknown values fail silently outside constant contexts.
  if (!S.inConstantContext() && isConstexprUnknown(P: Ptr))
    return false;
  return true;
}
883
884static bool CheckInvoke(InterpState &S, CodePtr OpPC, const Pointer &Ptr) {
885 if (!Ptr.isDummy() && !isConstexprUnknown(P: Ptr)) {
886 if (!CheckLive(S, OpPC, Ptr, AK: AK_MemberCall))
887 return false;
888 if (!CheckExtern(S, OpPC, Ptr))
889 return false;
890 if (!CheckRange(S, OpPC, Ptr, AK: AK_MemberCall))
891 return false;
892 }
893 return true;
894}
895
896bool CheckInit(InterpState &S, CodePtr OpPC, const Pointer &Ptr) {
897 if (!CheckLive(S, OpPC, Ptr, AK: AK_Assign))
898 return false;
899 if (!CheckRange(S, OpPC, Ptr, AK: AK_Assign))
900 return false;
901 return true;
902}
903
904static bool diagnoseCallableDecl(InterpState &S, CodePtr OpPC,
905 const FunctionDecl *DiagDecl) {
906 // Bail out if the function declaration itself is invalid. We will
907 // have produced a relevant diagnostic while parsing it, so just
908 // note the problematic sub-expression.
909 if (DiagDecl->isInvalidDecl())
910 return Invalid(S, OpPC);
911
912 // Diagnose failed assertions specially.
913 if (S.Current->getLocation(PC: OpPC).isMacroID() && DiagDecl->getIdentifier()) {
914 // FIXME: Instead of checking for an implementation-defined function,
915 // check and evaluate the assert() macro.
916 StringRef Name = DiagDecl->getName();
917 bool AssertFailed =
918 Name == "__assert_rtn" || Name == "__assert_fail" || Name == "_wassert";
919 if (AssertFailed) {
920 S.FFDiag(Loc: S.Current->getLocation(PC: OpPC),
921 DiagId: diag::note_constexpr_assert_failed);
922 return false;
923 }
924 }
925
926 if (!S.getLangOpts().CPlusPlus11) {
927 S.FFDiag(Loc: S.Current->getLocation(PC: OpPC),
928 DiagId: diag::note_invalid_subexpr_in_const_expr);
929 return false;
930 }
931
932 // Invalid decls have been diagnosed before.
933 if (DiagDecl->isInvalidDecl())
934 return false;
935
936 // If this function is not constexpr because it is an inherited
937 // non-constexpr constructor, diagnose that directly.
938 const auto *CD = dyn_cast<CXXConstructorDecl>(Val: DiagDecl);
939 if (CD && CD->isInheritingConstructor()) {
940 const auto *Inherited = CD->getInheritedConstructor().getConstructor();
941 if (!Inherited->isConstexpr())
942 DiagDecl = CD = Inherited;
943 }
944
945 // Silently reject constructors of invalid classes. The invalid class
946 // has been rejected elsewhere before.
947 if (CD && CD->getParent()->isInvalidDecl())
948 return false;
949
950 // FIXME: If DiagDecl is an implicitly-declared special member function
951 // or an inheriting constructor, we should be much more explicit about why
952 // it's not constexpr.
953 if (CD && CD->isInheritingConstructor()) {
954 S.FFDiag(Loc: S.Current->getLocation(PC: OpPC), DiagId: diag::note_constexpr_invalid_inhctor,
955 ExtraNotes: 1)
956 << CD->getInheritedConstructor().getConstructor()->getParent();
957 S.Note(Loc: DiagDecl->getLocation(), DiagId: diag::note_declared_at);
958 } else {
959 // Don't emit anything if the function isn't defined and we're checking
960 // for a constant expression. It might be defined at the point we're
961 // actually calling it.
962 bool IsExtern = DiagDecl->getStorageClass() == SC_Extern;
963 bool IsDefined = DiagDecl->isDefined();
964 if (!IsDefined && !IsExtern && DiagDecl->isConstexpr() &&
965 S.checkingPotentialConstantExpression())
966 return false;
967
968 // If the declaration is defined, declared 'constexpr' _and_ has a body,
969 // the below diagnostic doesn't add anything useful.
970 if (DiagDecl->isDefined() && DiagDecl->isConstexpr() && DiagDecl->hasBody())
971 return false;
972
973 S.FFDiag(Loc: S.Current->getLocation(PC: OpPC),
974 DiagId: diag::note_constexpr_invalid_function, ExtraNotes: 1)
975 << DiagDecl->isConstexpr() << (bool)CD << DiagDecl;
976
977 if (DiagDecl->getDefinition())
978 S.Note(Loc: DiagDecl->getDefinition()->getLocation(), DiagId: diag::note_declared_at);
979 else
980 S.Note(Loc: DiagDecl->getLocation(), DiagId: diag::note_declared_at);
981 }
982
983 return false;
984}
985
/// Check whether calling \p F is permitted in the current constant-evaluation
/// context. Returns true if the call may proceed; otherwise diagnoses (via
/// diagnoseCallableDecl) and returns false.
static bool CheckCallable(InterpState &S, CodePtr OpPC, const Function *F) {
  // Virtual calls are only allowed in constant expressions from C++20 on.
  if (F->isVirtual() && !S.getLangOpts().CPlusPlus20) {
    const SourceLocation &Loc = S.Current->getLocation(PC: OpPC);
    S.CCEDiag(Loc, DiagId: diag::note_constexpr_virtual_call);
    return false;
  }

  // When only probing for potential constancy, don't descend into nested
  // calls; fail silently (no diagnostic).
  if (S.checkingPotentialConstantExpression() && S.Current->getDepth() != 0)
    return false;

  // Happy path: a valid function with a body that is constexpr, or carries
  // [[msvc::constexpr]] while that extension is currently allowed.
  if (F->isValid() && F->hasBody() &&
      (F->isConstexpr() || (S.Current->MSVCConstexprAllowed &&
                            F->getDecl()->hasAttr<MSConstexprAttr>())))
    return true;

  const FunctionDecl *DiagDecl = F->getDecl();
  const FunctionDecl *Definition = nullptr;
  DiagDecl->getBody(Definition);

  // A constexpr function without a definition might still get one later,
  // so stay silent while checking for a potential constant expression.
  if (!Definition && S.checkingPotentialConstantExpression() &&
      DiagDecl->isConstexpr()) {
    return false;
  }

  // Implicitly constexpr.
  if (F->isLambdaStaticInvoker())
    return true;

  return diagnoseCallableDecl(S, OpPC, DiagDecl);
}
1016
1017static bool CheckCallDepth(InterpState &S, CodePtr OpPC) {
1018 if ((S.Current->getDepth() + 1) > S.getLangOpts().ConstexprCallDepth) {
1019 S.FFDiag(SI: S.Current->getSource(PC: OpPC),
1020 DiagId: diag::note_constexpr_depth_limit_exceeded)
1021 << S.getLangOpts().ConstexprCallDepth;
1022 return false;
1023 }
1024
1025 return true;
1026}
1027
1028bool CheckThis(InterpState &S, CodePtr OpPC) {
1029 if (S.Current->hasThisPointer())
1030 return true;
1031
1032 const Expr *E = S.Current->getExpr(PC: OpPC);
1033 if (S.getLangOpts().CPlusPlus11) {
1034 bool IsImplicit = false;
1035 if (const auto *TE = dyn_cast<CXXThisExpr>(Val: E))
1036 IsImplicit = TE->isImplicit();
1037 S.FFDiag(E, DiagId: diag::note_constexpr_this) << IsImplicit;
1038 } else {
1039 S.FFDiag(E);
1040 }
1041
1042 return false;
1043}
1044
1045bool CheckFloatResult(InterpState &S, CodePtr OpPC, const Floating &Result,
1046 APFloat::opStatus Status, FPOptions FPO) {
1047 // [expr.pre]p4:
1048 // If during the evaluation of an expression, the result is not
1049 // mathematically defined [...], the behavior is undefined.
1050 // FIXME: C++ rules require us to not conform to IEEE 754 here.
1051 if (Result.isNan()) {
1052 const SourceInfo &E = S.Current->getSource(PC: OpPC);
1053 S.CCEDiag(SI: E, DiagId: diag::note_constexpr_float_arithmetic)
1054 << /*NaN=*/true << S.Current->getRange(PC: OpPC);
1055 return S.noteUndefinedBehavior();
1056 }
1057
1058 // In a constant context, assume that any dynamic rounding mode or FP
1059 // exception state matches the default floating-point environment.
1060 if (S.inConstantContext())
1061 return true;
1062
1063 if ((Status & APFloat::opInexact) &&
1064 FPO.getRoundingMode() == llvm::RoundingMode::Dynamic) {
1065 // Inexact result means that it depends on rounding mode. If the requested
1066 // mode is dynamic, the evaluation cannot be made in compile time.
1067 const SourceInfo &E = S.Current->getSource(PC: OpPC);
1068 S.FFDiag(SI: E, DiagId: diag::note_constexpr_dynamic_rounding);
1069 return false;
1070 }
1071
1072 if ((Status != APFloat::opOK) &&
1073 (FPO.getRoundingMode() == llvm::RoundingMode::Dynamic ||
1074 FPO.getExceptionMode() != LangOptions::FPE_Ignore ||
1075 FPO.getAllowFEnvAccess())) {
1076 const SourceInfo &E = S.Current->getSource(PC: OpPC);
1077 S.FFDiag(SI: E, DiagId: diag::note_constexpr_float_arithmetic_strict);
1078 return false;
1079 }
1080
1081 if ((Status & APFloat::opStatus::opInvalidOp) &&
1082 FPO.getExceptionMode() != LangOptions::FPE_Ignore) {
1083 const SourceInfo &E = S.Current->getSource(PC: OpPC);
1084 // There is no usefully definable result.
1085 S.FFDiag(SI: E);
1086 return false;
1087 }
1088
1089 return true;
1090}
1091
1092bool CheckDynamicMemoryAllocation(InterpState &S, CodePtr OpPC) {
1093 if (S.getLangOpts().CPlusPlus20)
1094 return true;
1095
1096 const SourceInfo &E = S.Current->getSource(PC: OpPC);
1097 S.CCEDiag(SI: E, DiagId: diag::note_constexpr_new);
1098 return true;
1099}
1100
1101bool CheckNewDeleteForms(InterpState &S, CodePtr OpPC,
1102 DynamicAllocator::Form AllocForm,
1103 DynamicAllocator::Form DeleteForm, const Descriptor *D,
1104 const Expr *NewExpr) {
1105 if (AllocForm == DeleteForm)
1106 return true;
1107
1108 QualType TypeToDiagnose = D->getDataType(Ctx: S.getASTContext());
1109
1110 const SourceInfo &E = S.Current->getSource(PC: OpPC);
1111 S.FFDiag(SI: E, DiagId: diag::note_constexpr_new_delete_mismatch)
1112 << static_cast<int>(DeleteForm) << static_cast<int>(AllocForm)
1113 << TypeToDiagnose;
1114 S.Note(Loc: NewExpr->getExprLoc(), DiagId: diag::note_constexpr_dynamic_alloc_here)
1115 << NewExpr->getSourceRange();
1116 return false;
1117}
1118
1119bool CheckDeleteSource(InterpState &S, CodePtr OpPC, const Expr *Source,
1120 const Pointer &Ptr) {
1121 // Regular new type(...) call.
1122 if (isa_and_nonnull<CXXNewExpr>(Val: Source))
1123 return true;
1124 // operator new.
1125 if (const auto *CE = dyn_cast_if_present<CallExpr>(Val: Source);
1126 CE && CE->getBuiltinCallee() == Builtin::BI__builtin_operator_new)
1127 return true;
1128 // std::allocator.allocate() call
1129 if (const auto *MCE = dyn_cast_if_present<CXXMemberCallExpr>(Val: Source);
1130 MCE && MCE->getMethodDecl()->getIdentifier()->isStr(Str: "allocate"))
1131 return true;
1132
1133 // Whatever this is, we didn't heap allocate it.
1134 const SourceInfo &Loc = S.Current->getSource(PC: OpPC);
1135 S.FFDiag(SI: Loc, DiagId: diag::note_constexpr_delete_not_heap_alloc)
1136 << Ptr.toDiagnosticString(Ctx: S.getASTContext());
1137
1138 if (Ptr.isTemporary())
1139 S.Note(Loc: Ptr.getDeclLoc(), DiagId: diag::note_constexpr_temporary_here);
1140 else
1141 S.Note(Loc: Ptr.getDeclLoc(), DiagId: diag::note_declared_at);
1142 return false;
1143}
1144
1145/// We aleady know the given DeclRefExpr is invalid for some reason,
1146/// now figure out why and print appropriate diagnostics.
1147bool CheckDeclRef(InterpState &S, CodePtr OpPC, const DeclRefExpr *DR) {
1148 const ValueDecl *D = DR->getDecl();
1149 return diagnoseUnknownDecl(S, OpPC, D);
1150}
1151
1152bool InvalidDeclRef(InterpState &S, CodePtr OpPC, const DeclRefExpr *DR,
1153 bool InitializerFailed) {
1154 assert(DR);
1155
1156 if (InitializerFailed) {
1157 const SourceInfo &Loc = S.Current->getSource(PC: OpPC);
1158 const auto *VD = cast<VarDecl>(Val: DR->getDecl());
1159 S.FFDiag(SI: Loc, DiagId: diag::note_constexpr_var_init_non_constant, ExtraNotes: 1) << VD;
1160 S.Note(Loc: VD->getLocation(), DiagId: diag::note_declared_at);
1161 return false;
1162 }
1163
1164 return CheckDeclRef(S, OpPC, DR);
1165}
1166
1167bool CheckDummy(InterpState &S, CodePtr OpPC, const Block *B, AccessKinds AK) {
1168 if (!B->isDummy())
1169 return true;
1170
1171 const ValueDecl *D = B->getDescriptor()->asValueDecl();
1172 if (!D)
1173 return false;
1174
1175 if (AK == AK_Read || AK == AK_Increment || AK == AK_Decrement)
1176 return diagnoseUnknownDecl(S, OpPC, D);
1177
1178 if (AK == AK_Destroy || S.getLangOpts().CPlusPlus14) {
1179 const SourceInfo &E = S.Current->getSource(PC: OpPC);
1180 S.FFDiag(SI: E, DiagId: diag::note_constexpr_modify_global);
1181 }
1182 return false;
1183}
1184
1185static bool CheckNonNullArgs(InterpState &S, CodePtr OpPC, const Function *F,
1186 const CallExpr *CE, unsigned ArgSize) {
1187 auto Args = ArrayRef(CE->getArgs(), CE->getNumArgs());
1188 auto NonNullArgs = collectNonNullArgs(F: F->getDecl(), Args);
1189 unsigned Offset = 0;
1190 unsigned Index = 0;
1191 for (const Expr *Arg : Args) {
1192 if (NonNullArgs[Index] && Arg->getType()->isPointerType()) {
1193 const Pointer &ArgPtr = S.Stk.peek<Pointer>(Offset: ArgSize - Offset);
1194 if (ArgPtr.isZero()) {
1195 const SourceLocation &Loc = S.Current->getLocation(PC: OpPC);
1196 S.CCEDiag(Loc, DiagId: diag::note_non_null_attribute_failed);
1197 return false;
1198 }
1199 }
1200
1201 Offset += align(Size: primSize(Type: S.Ctx.classify(E: Arg).value_or(PT: PT_Ptr)));
1202 ++Index;
1203 }
1204 return true;
1205}
1206
1207static bool runRecordDestructor(InterpState &S, CodePtr OpPC,
1208 const Pointer &BasePtr,
1209 const Descriptor *Desc) {
1210 assert(Desc->isRecord());
1211 const Record *R = Desc->ElemRecord;
1212 assert(R);
1213
1214 if (!S.Current->isBottomFrame() && S.Current->hasThisPointer() &&
1215 S.Current->getFunction()->isDestructor() &&
1216 Pointer::pointToSameBlock(A: BasePtr, B: S.Current->getThis())) {
1217 const SourceInfo &Loc = S.Current->getSource(PC: OpPC);
1218 S.FFDiag(SI: Loc, DiagId: diag::note_constexpr_double_destroy);
1219 return false;
1220 }
1221
1222 // Destructor of this record.
1223 const CXXDestructorDecl *Dtor = R->getDestructor();
1224 assert(Dtor);
1225 assert(!Dtor->isTrivial());
1226 const Function *DtorFunc = S.getContext().getOrCreateFunction(FuncDecl: Dtor);
1227 if (!DtorFunc)
1228 return false;
1229
1230 S.Stk.push<Pointer>(Args: BasePtr);
1231 return Call(S, OpPC, Func: DtorFunc, VarArgSize: 0);
1232}
1233
1234static bool RunDestructors(InterpState &S, CodePtr OpPC, const Block *B) {
1235 assert(B);
1236 const Descriptor *Desc = B->getDescriptor();
1237
1238 if (Desc->isPrimitive() || Desc->isPrimitiveArray())
1239 return true;
1240
1241 assert(Desc->isRecord() || Desc->isCompositeArray());
1242
1243 if (Desc->hasTrivialDtor())
1244 return true;
1245
1246 if (Desc->isCompositeArray()) {
1247 unsigned N = Desc->getNumElems();
1248 if (N == 0)
1249 return true;
1250 const Descriptor *ElemDesc = Desc->ElemDesc;
1251 assert(ElemDesc->isRecord());
1252
1253 Pointer RP(const_cast<Block *>(B));
1254 for (int I = static_cast<int>(N) - 1; I >= 0; --I) {
1255 if (!runRecordDestructor(S, OpPC, BasePtr: RP.atIndex(Idx: I).narrow(), Desc: ElemDesc))
1256 return false;
1257 }
1258 return true;
1259 }
1260
1261 assert(Desc->isRecord());
1262 return runRecordDestructor(S, OpPC, BasePtr: Pointer(const_cast<Block *>(B)), Desc);
1263}
1264
1265static bool hasVirtualDestructor(QualType T) {
1266 if (const CXXRecordDecl *RD = T->getAsCXXRecordDecl())
1267 if (const CXXDestructorDecl *DD = RD->getDestructor())
1268 return DD->isVirtual();
1269 return false;
1270}
1271
/// Opcode: evaluate a delete / delete[] expression on the pointer at the top
/// of the stack. Checks form mismatches, subobject deletes, non-heap
/// sources and replaceability, runs destructors, then deallocates.
bool Free(InterpState &S, CodePtr OpPC, bool DeleteIsArrayForm,
          bool IsGlobalDelete) {
  if (!CheckDynamicMemoryAllocation(S, OpPC))
    return false;

  DynamicAllocator &Allocator = S.getAllocator();

  const Expr *Source = nullptr;
  const Block *BlockToDelete = nullptr;
  {
    // Extra scope for this so the block doesn't have this pointer
    // pointing to it when we destroy it.
    Pointer Ptr = S.Stk.pop<Pointer>();

    // Deleting nullptr is always fine.
    if (Ptr.isZero())
      return true;

    // Remove base casts.
    QualType InitialType = Ptr.getType();
    Ptr = Ptr.stripBaseCasts();

    Source = Ptr.getDeclDesc()->asExpr();
    BlockToDelete = Ptr.block();

    // Check that new[]/delete[] or new/delete were used, not a mixture.
    const Descriptor *BlockDesc = BlockToDelete->getDescriptor();
    if (std::optional<DynamicAllocator::Form> AllocForm =
            Allocator.getAllocationForm(Source)) {
      DynamicAllocator::Form DeleteForm =
          DeleteIsArrayForm ? DynamicAllocator::Form::Array
                            : DynamicAllocator::Form::NonArray;
      if (!CheckNewDeleteForms(S, OpPC, AllocForm: *AllocForm, DeleteForm, D: BlockDesc,
                               NewExpr: Source))
        return false;
    }

    // For the non-array case, the types must match if the static type
    // does not have a virtual destructor.
    if (!DeleteIsArrayForm && Ptr.getType() != InitialType &&
        !hasVirtualDestructor(T: InitialType)) {
      S.FFDiag(SI: S.Current->getSource(PC: OpPC),
               DiagId: diag::note_constexpr_delete_base_nonvirt_dtor)
          << InitialType << Ptr.getType();
      return false;
    }

    // Only a pointer to the start of a complete heap object may be deleted;
    // one-past-the-end (except for zero-size arrays) and interior array
    // elements are subobject deletes.
    if (!Ptr.isRoot() || (Ptr.isOnePastEnd() && !Ptr.isZeroSizeArray()) ||
        (Ptr.isArrayElement() && Ptr.getIndex() != 0)) {
      const SourceInfo &Loc = S.Current->getSource(PC: OpPC);
      S.FFDiag(SI: Loc, DiagId: diag::note_constexpr_delete_subobject)
          << Ptr.toDiagnosticString(Ctx: S.getASTContext()) << Ptr.isOnePastEnd();
      return false;
    }

    if (!CheckDeleteSource(S, OpPC, Source, Ptr))
      return false;

    // For a class type with a virtual destructor, the selected operator delete
    // is the one looked up when building the destructor.
    if (!DeleteIsArrayForm && !IsGlobalDelete) {
      QualType AllocType = Ptr.getType();
      auto getVirtualOperatorDelete = [](QualType T) -> const FunctionDecl * {
        if (const CXXRecordDecl *RD = T->getAsCXXRecordDecl())
          if (const CXXDestructorDecl *DD = RD->getDestructor())
            return DD->isVirtual() ? DD->getOperatorDelete() : nullptr;
        return nullptr;
      };

      if (const FunctionDecl *VirtualDelete =
              getVirtualOperatorDelete(AllocType);
          VirtualDelete &&
          !VirtualDelete
               ->isUsableAsGlobalAllocationFunctionInConstantEvaluation()) {
        S.FFDiag(SI: S.Current->getSource(PC: OpPC),
                 DiagId: diag::note_constexpr_new_non_replaceable)
            << isa<CXXMethodDecl>(Val: VirtualDelete) << VirtualDelete;
        return false;
      }
    }
  }
  assert(Source);
  assert(BlockToDelete);

  // Invoke destructors before deallocating the memory.
  if (!RunDestructors(S, OpPC, B: BlockToDelete))
    return false;

  if (!Allocator.deallocate(Source, BlockToDelete, S)) {
    // Nothing has been deallocated, this must be a double-delete.
    const SourceInfo &Loc = S.Current->getSource(PC: OpPC);
    S.FFDiag(SI: Loc, DiagId: diag::note_constexpr_double_delete);
    return false;
  }

  return true;
}
1369
1370void diagnoseEnumValue(InterpState &S, CodePtr OpPC, const EnumDecl *ED,
1371 const APSInt &Value) {
1372 llvm::APInt Min;
1373 llvm::APInt Max;
1374 ED->getValueRange(Max, Min);
1375 --Max;
1376
1377 if (ED->getNumNegativeBits() &&
1378 (Max.slt(RHS: Value.getSExtValue()) || Min.sgt(RHS: Value.getSExtValue()))) {
1379 const SourceLocation &Loc = S.Current->getLocation(PC: OpPC);
1380 S.CCEDiag(Loc, DiagId: diag::note_constexpr_unscoped_enum_out_of_range)
1381 << llvm::toString(I: Value, Radix: 10) << Min.getSExtValue() << Max.getSExtValue()
1382 << ED;
1383 } else if (!ED->getNumNegativeBits() && Max.ult(RHS: Value.getZExtValue())) {
1384 const SourceLocation &Loc = S.Current->getLocation(PC: OpPC);
1385 S.CCEDiag(Loc, DiagId: diag::note_constexpr_unscoped_enum_out_of_range)
1386 << llvm::toString(I: Value, Radix: 10) << Min.getZExtValue() << Max.getZExtValue()
1387 << ED;
1388 }
1389}
1390
1391bool CheckLiteralType(InterpState &S, CodePtr OpPC, const Type *T) {
1392 assert(T);
1393 assert(!S.getLangOpts().CPlusPlus23);
1394
1395 // C++1y: A constant initializer for an object o [...] may also invoke
1396 // constexpr constructors for o and its subobjects even if those objects
1397 // are of non-literal class types.
1398 //
1399 // C++11 missed this detail for aggregates, so classes like this:
1400 // struct foo_t { union { int i; volatile int j; } u; };
1401 // are not (obviously) initializable like so:
1402 // __attribute__((__require_constant_initialization__))
1403 // static const foo_t x = {{0}};
1404 // because "i" is a subobject with non-literal initialization (due to the
1405 // volatile member of the union). See:
1406 // http://www.open-std.org/jtc1/sc22/wg21/docs/cwg_active.html#1677
1407 // Therefore, we use the C++1y behavior.
1408
1409 if (!S.Current->isBottomFrame() &&
1410 S.Current->getFunction()->isConstructor() &&
1411 S.Current->getThis().getDeclDesc()->asDecl() == S.EvaluatingDecl) {
1412 return true;
1413 }
1414
1415 const Expr *E = S.Current->getExpr(PC: OpPC);
1416 if (S.getLangOpts().CPlusPlus11)
1417 S.FFDiag(E, DiagId: diag::note_constexpr_nonliteral) << E->getType();
1418 else
1419 S.FFDiag(E, DiagId: diag::note_invalid_subexpr_in_const_expr);
1420 return false;
1421}
1422
/// Common implementation of GetPtrField/GetPtrFieldPop: push a pointer to
/// the field at byte offset \p Off inside \p Ptr onto the stack. Returns
/// false (possibly after diagnosing) when the access is invalid.
static bool getField(InterpState &S, CodePtr OpPC, const Pointer &Ptr,
                     uint32_t Off) {
  // Null-base field access is diagnosed only for C++ constant contexts here.
  if (S.getLangOpts().CPlusPlus && S.inConstantContext() &&
      !CheckNull(S, OpPC, Ptr, CSK: CSK_Field))
    return false;

  if (!CheckRange(S, OpPC, Ptr, CSK: CSK_Field))
    return false;
  if (!CheckArray(S, OpPC, Ptr))
    return false;
  if (!CheckSubobject(S, OpPC, Ptr, CSK: CSK_Field))
    return false;

  // Integral pointers: do the offset arithmetic on the integer value.
  if (Ptr.isIntegralPointer()) {
    if (std::optional<IntPointer> IntPtr =
            Ptr.asIntPointer().atOffset(ASTCtx: S.getASTContext(), Offset: Off)) {
      S.Stk.push<Pointer>(Args: std::move(*IntPtr));
      return true;
    }
    return false;
  }

  if (!Ptr.isBlockPointer()) {
    // FIXME: The only time we (seem to) get here is when trying to access a
    // field of a typeid pointer. In that case, we're supposed to diagnose e.g.
    // `typeid(int).name`, but we currently diagnose `&typeid(int)`.
    S.FFDiag(SI: S.Current->getSource(PC: OpPC),
             DiagId: diag::note_constexpr_access_unreadable_object)
        << AK_Read << Ptr.toDiagnosticString(Ctx: S.getASTContext());
    return false;
  }

  // We can't get the field of something that's not a record.
  if (!Ptr.getFieldDesc()->isRecord())
    return false;

  // Reject field offsets that would land past the end of the block.
  if ((Ptr.getByteOffset() + Off) >= Ptr.block()->getSize())
    return false;

  S.Stk.push<Pointer>(Args: Ptr.atField(Off));
  return true;
}
1465
1466bool GetPtrField(InterpState &S, CodePtr OpPC, uint32_t Off) {
1467 const auto &Ptr = S.Stk.peek<Pointer>();
1468 return getField(S, OpPC, Ptr, Off);
1469}
1470
1471bool GetPtrFieldPop(InterpState &S, CodePtr OpPC, uint32_t Off) {
1472 const auto &Ptr = S.Stk.pop<Pointer>();
1473 return getField(S, OpPC, Ptr, Off);
1474}
1475
1476static bool checkConstructor(InterpState &S, CodePtr OpPC, const Function *Func,
1477 const Pointer &ThisPtr) {
1478 assert(Func->isConstructor());
1479
1480 if (Func->getParentDecl()->isInvalidDecl())
1481 return false;
1482
1483 const Descriptor *D = ThisPtr.getFieldDesc();
1484 // FIXME: I think this case is not 100% correct. E.g. a pointer into a
1485 // subobject of a composite array.
1486 if (!D->ElemRecord)
1487 return true;
1488
1489 if (D->ElemRecord->getNumVirtualBases() == 0)
1490 return true;
1491
1492 S.FFDiag(Loc: S.Current->getLocation(PC: OpPC), DiagId: diag::note_constexpr_virtual_base)
1493 << Func->getParentDecl();
1494 return false;
1495}
1496
1497bool CheckDestructor(InterpState &S, CodePtr OpPC, const Pointer &Ptr) {
1498 if (!CheckLive(S, OpPC, Ptr, AK: AK_Destroy))
1499 return false;
1500 if (!CheckTemporary(S, OpPC, B: Ptr.block(), AK: AK_Destroy))
1501 return false;
1502 if (!CheckRange(S, OpPC, Ptr, AK: AK_Destroy))
1503 return false;
1504 if (!CheckLifetime(S, OpPC, LT: Ptr.getLifetime(), AK: AK_Destroy))
1505 return false;
1506
1507 // Can't call a dtor on a global variable.
1508 if (Ptr.block()->isStatic()) {
1509 const SourceInfo &E = S.Current->getSource(PC: OpPC);
1510 S.FFDiag(SI: E, DiagId: diag::note_constexpr_modify_global);
1511 return false;
1512 }
1513 return CheckActive(S, OpPC, Ptr, AK: AK_Destroy);
1514}
1515
1516/// Opcode. Check if the function decl can be called at compile time.
1517bool CheckFunctionDecl(InterpState &S, CodePtr OpPC, const FunctionDecl *FD) {
1518 if (S.checkingPotentialConstantExpression() && S.Current->getDepth() != 0)
1519 return false;
1520
1521 const FunctionDecl *Definition = nullptr;
1522 const Stmt *Body = FD->getBody(Definition);
1523
1524 if (Definition && Body &&
1525 (Definition->isConstexpr() || (S.Current->MSVCConstexprAllowed &&
1526 Definition->hasAttr<MSConstexprAttr>())))
1527 return true;
1528
1529 return diagnoseCallableDecl(S, OpPC, DiagDecl: FD);
1530}
1531
/// Check a pointer bit/reinterpret cast to \p TargetType. Emits CCE notes
/// for `void*` round-trips (depending on language mode) and rejects
/// record-to-different-record casts.
bool CheckBitCast(InterpState &S, CodePtr OpPC, const Type *TargetType,
                  bool SrcIsVoidPtr) {
  const auto &Ptr = S.Stk.peek<Pointer>();
  // Null pointers and non-block pointers are not checked here.
  if (Ptr.isZero())
    return true;
  if (!Ptr.isBlockPointer())
    return true;

  if (TargetType->isIntegerType())
    return true;

  if (SrcIsVoidPtr && S.getLangOpts().CPlusPlus) {
    // NOTE(review): always true here since Ptr.isZero() returned above —
    // the else branch below looks unreachable; confirm intent.
    bool HasValidResult = !Ptr.isZero();

    if (HasValidResult) {
      // std::allocator<T>::allocate() results may be cast freely.
      if (S.getStdAllocatorCaller(Name: "allocate"))
        return true;

      const auto *E = cast<CastExpr>(Val: S.Current->getExpr(PC: OpPC));
      // C++26 allows casting void* back to a similar pointer type.
      if (S.getLangOpts().CPlusPlus26 &&
          S.getASTContext().hasSimilarType(T1: Ptr.getType(),
                                           T2: QualType(TargetType, 0)))
        return true;

      S.CCEDiag(E, DiagId: diag::note_constexpr_invalid_void_star_cast)
          << E->getSubExpr()->getType() << S.getLangOpts().CPlusPlus26
          << Ptr.getType().getCanonicalType() << E->getType()->getPointeeType();
    } else if (!S.getLangOpts().CPlusPlus26) {
      const SourceInfo &E = S.Current->getSource(PC: OpPC);
      S.CCEDiag(SI: E, DiagId: diag::note_constexpr_invalid_cast)
          << diag::ConstexprInvalidCastKind::CastFrom << "'void *'"
          << S.Current->getRange(PC: OpPC);
    }
  }

  // Casting a record pointer to a different record type is invalid.
  QualType PtrType = Ptr.getType();
  if (PtrType->isRecordType() &&
      PtrType->getAsRecordDecl() != TargetType->getAsRecordDecl()) {
    S.CCEDiag(SI: S.Current->getSource(PC: OpPC), DiagId: diag::note_constexpr_invalid_cast)
        << diag::ConstexprInvalidCastKind::ThisConversionOrReinterpret
        << S.getLangOpts().CPlusPlus << S.Current->getRange(PC: OpPC);
    return false;
  }
  return true;
}
1577
1578static void compileFunction(InterpState &S, const Function *Func) {
1579 const FunctionDecl *Definition = Func->getDecl()->getDefinition();
1580 if (!Definition)
1581 return;
1582
1583 Compiler<ByteCodeEmitter>(S.getContext(), S.P)
1584 .compileFunc(FuncDecl: Definition, Func: const_cast<Function *>(Func));
1585}
1586
/// Call a variadic function \p Func with \p VarArgSize extra bytes of
/// variadic arguments already on the stack. Pushes a new frame, interprets
/// the body, and restores the previous frame on failure.
bool CallVar(InterpState &S, CodePtr OpPC, const Function *Func,
             uint32_t VarArgSize) {
  if (Func->hasThisPointer()) {
    size_t ArgSize = Func->getArgSize() + VarArgSize;
    // The instance pointer sits below the arguments (and the RVO pointer,
    // if there is one).
    size_t ThisOffset = ArgSize - (Func->hasRVO() ? primSize(Type: PT_Ptr) : 0);
    const Pointer &ThisPtr = S.Stk.peek<Pointer>(Offset: ThisOffset);

    // If the current function is a lambda static invoker and
    // the function we're about to call is a lambda call operator,
    // skip the CheckInvoke, since the ThisPtr is a null pointer
    // anyway.
    if (!(S.Current->getFunction() &&
          S.Current->getFunction()->isLambdaStaticInvoker() &&
          Func->isLambdaCallOperator())) {
      if (!CheckInvoke(S, OpPC, Ptr: ThisPtr))
        return false;
    }

    // Don't evaluate instance calls while only probing for potential
    // constancy.
    if (S.checkingPotentialConstantExpression())
      return false;
  }

  if (!Func->isFullyCompiled())
    compileFunction(S, Func);

  if (!CheckCallable(S, OpPC, F: Func))
    return false;

  if (!CheckCallDepth(S, OpPC))
    return false;

  auto Memory = new char[InterpFrame::allocSize(F: Func)];
  auto NewFrame = new (Memory) InterpFrame(S, Func, OpPC, VarArgSize);
  InterpFrame *FrameBefore = S.Current;
  S.Current = NewFrame;

  // Note that we cannot assert(CallResult.hasValue()) here since
  // Ret() above only sets the APValue if the current frame doesn't
  // have a caller set.
  if (Interpret(S)) {
    assert(S.Current == FrameBefore);
    return true;
  }

  InterpFrame::free(F: NewFrame);
  // Interpreting the function failed somehow. Reset to
  // previous state.
  S.Current = FrameBefore;
  return false;
}
/// Call \p Func with its arguments (and \p VarArgSize variadic bytes)
/// already on the stack: validates the instance pointer, pushes a frame,
/// interprets the body, and unwinds on failure.
bool Call(InterpState &S, CodePtr OpPC, const Function *Func,
          uint32_t VarArgSize) {

  // C doesn't have constexpr functions.
  if (!S.getLangOpts().CPlusPlus)
    return Invalid(S, OpPC);

  assert(Func);
  // On failure before the frame is pushed, the arguments are still on the
  // stack and must be cleaned up.
  auto cleanup = [&]() -> bool {
    cleanupAfterFunctionCall(S, OpPC, Func);
    return false;
  };

  if (Func->hasThisPointer()) {
    size_t ArgSize = Func->getArgSize() + VarArgSize;
    // The instance pointer sits below the arguments (and the RVO pointer,
    // if there is one).
    size_t ThisOffset = ArgSize - (Func->hasRVO() ? primSize(Type: PT_Ptr) : 0);

    const Pointer &ThisPtr = S.Stk.peek<Pointer>(Offset: ThisOffset);

    // C++23 [expr.const]p5.6
    // an invocation of a virtual function ([class.virtual]) for an object whose
    // dynamic type is constexpr-unknown;
    if (ThisPtr.isDummy() && Func->isVirtual())
      return false;

    // If the current function is a lambda static invoker and
    // the function we're about to call is a lambda call operator,
    // skip the CheckInvoke, since the ThisPtr is a null pointer
    // anyway.
    if (S.Current->getFunction() &&
        S.Current->getFunction()->isLambdaStaticInvoker() &&
        Func->isLambdaCallOperator()) {
      assert(ThisPtr.isZero());
    } else {
      if (!CheckInvoke(S, OpPC, Ptr: ThisPtr))
        return cleanup();
      if (!Func->isConstructor() && !Func->isDestructor() &&
          !CheckActive(S, OpPC, Ptr: ThisPtr, AK: AK_MemberCall))
        return false;
    }

    if (Func->isConstructor() && !checkConstructor(S, OpPC, Func, ThisPtr))
      return false;
    if (Func->isDestructor() && !CheckDestructor(S, OpPC, Ptr: ThisPtr))
      return false;

    // Record the block being constructed/destroyed; CallVirt consults this
    // to suppress virtual dispatch during construction/destruction.
    if (Func->isConstructor() || Func->isDestructor())
      S.InitializingBlocks.push_back(Elt: ThisPtr.block());
  }

  if (!Func->isFullyCompiled())
    compileFunction(S, Func);

  if (!CheckCallable(S, OpPC, F: Func))
    return cleanup();

  // Do not evaluate any function calls in checkingPotentialConstantExpression
  // mode. Constructors will be aborted later when their initializers are
  // evaluated.
  if (S.checkingPotentialConstantExpression() && !Func->isConstructor())
    return false;

  if (!CheckCallDepth(S, OpPC))
    return cleanup();

  auto Memory = new char[InterpFrame::allocSize(F: Func)];
  auto NewFrame = new (Memory) InterpFrame(S, Func, OpPC, VarArgSize);
  InterpFrame *FrameBefore = S.Current;
  S.Current = NewFrame;

  InterpStateCCOverride CCOverride(S, Func->isImmediate());
  // Note that we cannot assert(CallResult.hasValue()) here since
  // Ret() above only sets the APValue if the current frame doesn't
  // have a caller set.
  bool Success = Interpret(S);
  // Remove initializing block again.
  if (Func->isConstructor() || Func->isDestructor())
    S.InitializingBlocks.pop_back();

  if (!Success) {
    InterpFrame::free(F: NewFrame);
    // Interpreting the function failed somehow. Reset to
    // previous state.
    S.Current = FrameBefore;
    return false;
  }

  assert(S.Current == FrameBefore);
  return true;
}
1727
/// Determine the dynamic class of the object \p TypePtr points to, for
/// virtual dispatch. Returns false (diagnosing where appropriate) when the
/// dynamic type cannot be determined.
static bool getDynamicDecl(InterpState &S, CodePtr OpPC, Pointer TypePtr,
                           const CXXRecordDecl *&DynamicDecl) {
  TypePtr = TypePtr.stripBaseCasts();

  QualType DynamicType = TypePtr.getType();
  // A static or const object backed by a non-constexpr variable has a
  // constexpr-unknown dynamic type; diagnose instead of guessing.
  if (TypePtr.isStatic() || TypePtr.isConst()) {
    if (const VarDecl *VD = TypePtr.getDeclDesc()->asVarDecl();
        VD && !VD->isConstexpr()) {
      const Expr *E = S.Current->getExpr(PC: OpPC);
      APValue V = TypePtr.toAPValue(ASTCtx: S.getASTContext());
      QualType TT = S.getASTContext().getLValueReferenceType(T: DynamicType);
      S.FFDiag(E, DiagId: diag::note_constexpr_polymorphic_unknown_dynamic_type)
          << AccessKinds::AK_MemberCall << V.getAsString(Ctx: S.getASTContext(), Ty: TT);
      return false;
    }
  }

  // Strip pointer/reference/array wrappers to reach the class type.
  if (DynamicType->isPointerType() || DynamicType->isReferenceType()) {
    DynamicDecl = DynamicType->getPointeeCXXRecordDecl();
  } else if (DynamicType->isArrayType()) {
    const Type *ElemType = DynamicType->getPointeeOrArrayElementType();
    assert(ElemType);
    DynamicDecl = ElemType->getAsCXXRecordDecl();
  } else {
    DynamicDecl = DynamicType->getAsCXXRecordDecl();
  }
  return DynamicDecl != nullptr;
}
1756
/// Perform a virtual call: resolve the overrider based on the dynamic type
/// of the instance pointer, call it, and adjust the result for covariant
/// return types.
bool CallVirt(InterpState &S, CodePtr OpPC, const Function *Func,
              uint32_t VarArgSize) {
  assert(Func->hasThisPointer());
  assert(Func->isVirtual());
  size_t ArgSize = Func->getArgSize() + VarArgSize;
  size_t ThisOffset = ArgSize - (Func->hasRVO() ? primSize(Type: PT_Ptr) : 0);
  Pointer &ThisPtr = S.Stk.peek<Pointer>(Offset: ThisOffset);
  const FunctionDecl *Callee = Func->getDecl();

  const CXXRecordDecl *DynamicDecl = nullptr;
  if (!getDynamicDecl(S, OpPC, TypePtr: ThisPtr, DynamicDecl))
    return false;
  assert(DynamicDecl);

  const auto *StaticDecl = cast<CXXRecordDecl>(Val: Func->getParentDecl());
  const auto *InitialFunction = cast<CXXMethodDecl>(Val: Callee);
  const CXXMethodDecl *Overrider;

  // No virtual dispatch while the object is under construction/destruction
  // (its block is in InitializingBlocks) — use the static function then.
  if (StaticDecl != DynamicDecl &&
      !llvm::is_contained(Range&: S.InitializingBlocks, Element: ThisPtr.block())) {
    if (!DynamicDecl->isDerivedFrom(Base: StaticDecl))
      return false;
    Overrider = S.getContext().getOverridingFunction(DynamicDecl, StaticDecl,
                                                     InitialFunction);

  } else {
    Overrider = InitialFunction;
  }

  // C++2a [class.abstract]p6:
  //   the effect of making a virtual call to a pure virtual function [...] is
  //   undefined
  if (Overrider->isPureVirtual()) {
    S.FFDiag(SI: S.Current->getSource(PC: OpPC), DiagId: diag::note_constexpr_pure_virtual_call,
             ExtraNotes: 1)
        << Callee;
    S.Note(Loc: Callee->getLocation(), DiagId: diag::note_declared_at);
    return false;
  }

  if (Overrider != InitialFunction) {
    // DR1872: An instantiated virtual constexpr function can't be called in a
    // constant expression (prior to C++20). We can still constant-fold such a
    // call.
    if (!S.getLangOpts().CPlusPlus20 && Overrider->isVirtual()) {
      const Expr *E = S.Current->getExpr(PC: OpPC);
      S.CCEDiag(E, DiagId: diag::note_constexpr_virtual_call) << E->getSourceRange();
    }

    Func = S.getContext().getOrCreateFunction(FuncDecl: Overrider);

    const CXXRecordDecl *ThisFieldDecl =
        ThisPtr.getFieldDesc()->getType()->getAsCXXRecordDecl();
    if (Func->getParentDecl()->isDerivedFrom(Base: ThisFieldDecl)) {
      // If the function we call is further DOWN the hierarchy than the
      // FieldDesc of our pointer, just go up the hierarchy of this field
      // the furthest we can go.
      ThisPtr = ThisPtr.stripBaseCasts();
    }
  }

  if (!Call(S, OpPC, Func, VarArgSize))
    return false;

  // Covariant return types. The return type of Overrider is a pointer
  // or reference to a class type.
  if (Overrider != InitialFunction &&
      Overrider->getReturnType()->isPointerOrReferenceType() &&
      InitialFunction->getReturnType()->isPointerOrReferenceType()) {
    QualType OverriderPointeeType =
        Overrider->getReturnType()->getPointeeType();
    QualType InitialPointeeType =
        InitialFunction->getReturnType()->getPointeeType();
    // We've called Overrider above, but calling code expects us to return what
    // InitialFunction returned. According to the rules for covariant return
    // types, what InitialFunction returns needs to be a base class of what
    // Overrider returns. So, we need to do an upcast here.
    unsigned Offset = S.getContext().collectBaseOffset(
        BaseDecl: InitialPointeeType->getAsRecordDecl(),
        DerivedDecl: OverriderPointeeType->getAsRecordDecl());
    return GetPtrBasePop(S, OpPC, Off: Offset, /*IsNullOK=*/NullOK: true);
  }

  return true;
}
1842
1843bool CallBI(InterpState &S, CodePtr OpPC, const CallExpr *CE,
1844 uint32_t BuiltinID) {
1845 // A little arbitrary, but the current interpreter allows evaluation
1846 // of builtin functions in this mode, with some exceptions.
1847 if (BuiltinID == Builtin::BI__builtin_operator_new &&
1848 S.checkingPotentialConstantExpression())
1849 return false;
1850
1851 return InterpretBuiltin(S, OpPC, Call: CE, BuiltinID);
1852}
1853
/// Implements a call through a function pointer popped off the stack.
/// ArgSize is the total size of the arguments already pushed; CE is the
/// call expression being evaluated.
bool CallPtr(InterpState &S, CodePtr OpPC, uint32_t ArgSize,
             const CallExpr *CE) {
  const Pointer &Ptr = S.Stk.pop<Pointer>();

  // Calling a null function pointer is never a constant expression.
  if (Ptr.isZero()) {
    S.FFDiag(SI: S.Current->getSource(PC: OpPC), DiagId: diag::note_constexpr_null_callee)
        << const_cast<Expr *>(CE->getCallee()) << CE->getSourceRange();
    return false;
  }

  if (!Ptr.isFunctionPointer())
    return Invalid(S, OpPC);

  const Function *F = Ptr.asFunctionPointer().Func;
  assert(F);
  // Don't allow calling block pointers.
  if (!F->getDecl())
    return Invalid(S, OpPC);

  // This happens when the call expression has been cast to
  // something else, but we don't support that.
  if (S.Ctx.classify(T: F->getDecl()->getReturnType()) !=
      S.Ctx.classify(T: CE->getCallReturnType(Ctx: S.getASTContext())))
    return false;

  // Check argument nullability state.
  if (F->hasNonNullAttr()) {
    if (!CheckNonNullArgs(S, OpPC, F, CE, ArgSize))
      return false;
  }

  // Can happen when casting function pointers around.
  QualType CalleeType = CE->getCallee()->getType();
  if (CalleeType->isPointerType() &&
      !S.getASTContext().hasSameFunctionTypeIgnoringExceptionSpec(
          T: F->getDecl()->getType(), U: CalleeType->getPointeeType())) {
    return false;
  }

  // We need to compile (and check) early for function pointer calls
  // because the Call/CallVirt below might access the instance pointer
  // but the Function's information about them is wrong.
  if (!F->isFullyCompiled())
    compileFunction(S, Func: F);

  if (!CheckCallable(S, OpPC, F))
    return false;

  assert(ArgSize >= F->getWrittenArgSize());
  uint32_t VarArgSize = ArgSize - F->getWrittenArgSize();

  // We need to do this explicitly here since we don't have the necessary
  // information to do it automatically.
  if (F->isThisPointerExplicit())
    VarArgSize -= align(Size: primSize(Type: PT_Ptr));

  if (F->isVirtual())
    return CallVirt(S, OpPC, Func: F, VarArgSize);

  return Call(S, OpPC, Func: F, VarArgSize);
}
1915
1916static void startLifetimeRecurse(const Pointer &Ptr) {
1917 if (const Record *R = Ptr.getRecord()) {
1918 Ptr.startLifetime();
1919 for (const Record::Field &Fi : R->fields())
1920 startLifetimeRecurse(Ptr: Ptr.atField(Off: Fi.Offset));
1921 return;
1922 }
1923
1924 if (const Descriptor *FieldDesc = Ptr.getFieldDesc();
1925 FieldDesc->isCompositeArray()) {
1926 assert(Ptr.getLifetime() == Lifetime::Started);
1927 for (unsigned I = 0; I != FieldDesc->getNumElems(); ++I)
1928 startLifetimeRecurse(Ptr: Ptr.atIndex(Idx: I).narrow());
1929 return;
1930 }
1931
1932 Ptr.startLifetime();
1933}
1934
1935bool StartLifetime(InterpState &S, CodePtr OpPC) {
1936 const auto &Ptr = S.Stk.peek<Pointer>();
1937 if (Ptr.isBlockPointer() && !CheckDummy(S, OpPC, B: Ptr.block(), AK: AK_Destroy))
1938 return false;
1939 startLifetimeRecurse(Ptr: Ptr.narrow());
1940 return true;
1941}
1942
1943// FIXME: It might be better to the recursing as part of the generated code for
1944// a destructor?
1945static void endLifetimeRecurse(const Pointer &Ptr) {
1946 if (const Record *R = Ptr.getRecord()) {
1947 Ptr.endLifetime();
1948 for (const Record::Field &Fi : R->fields())
1949 endLifetimeRecurse(Ptr: Ptr.atField(Off: Fi.Offset));
1950 return;
1951 }
1952
1953 if (const Descriptor *FieldDesc = Ptr.getFieldDesc();
1954 FieldDesc->isCompositeArray()) {
1955 // No endLifetime() for array roots.
1956 assert(Ptr.getLifetime() == Lifetime::Started);
1957 for (unsigned I = 0; I != FieldDesc->getNumElems(); ++I)
1958 endLifetimeRecurse(Ptr: Ptr.atIndex(Idx: I).narrow());
1959 return;
1960 }
1961
1962 Ptr.endLifetime();
1963}
1964
1965/// Ends the lifetime of the peek'd pointer.
1966bool EndLifetime(InterpState &S, CodePtr OpPC) {
1967 const auto &Ptr = S.Stk.peek<Pointer>();
1968 if (Ptr.isBlockPointer() && !CheckDummy(S, OpPC, B: Ptr.block(), AK: AK_Destroy))
1969 return false;
1970
1971 endLifetimeRecurse(Ptr: Ptr.narrow());
1972 return true;
1973}
1974
1975/// Ends the lifetime of the pop'd pointer.
1976bool EndLifetimePop(InterpState &S, CodePtr OpPC) {
1977 const auto &Ptr = S.Stk.pop<Pointer>();
1978 if (Ptr.isBlockPointer() && !CheckDummy(S, OpPC, B: Ptr.block(), AK: AK_Destroy))
1979 return false;
1980
1981 endLifetimeRecurse(Ptr: Ptr.narrow());
1982 return true;
1983}
1984
/// Implements the checks for a placement-new expression: the storage the
/// peek'd pointer designates must be usable to construct an object of the
/// allocated type. ArraySize, if set, is the array-new element count.
bool CheckNewTypeMismatch(InterpState &S, CodePtr OpPC, const Expr *E,
                          std::optional<uint64_t> ArraySize) {
  const Pointer &Ptr = S.Stk.peek<Pointer>();

  // Placement-new into a union member whose direct parent is the union
  // activates that member.
  if (Ptr.inUnion() && Ptr.getBase().getRecord()->isUnion())
    Ptr.activate();

  // Constructing at a null pointer is never allowed.
  if (Ptr.isZero()) {
    S.FFDiag(SI: S.Current->getSource(PC: OpPC), DiagId: diag::note_constexpr_access_null)
        << AK_Construct;
    return false;
  }

  if (!Ptr.isBlockPointer())
    return false;

  if (!CheckRange(S, OpPC, Ptr, AK: AK_Construct))
    return false;

  // Construction (re)starts the lifetime of the storage.
  startLifetimeRecurse(Ptr);

  // Similar to CheckStore(), but with the additional CheckTemporary() call and
  // the AccessKinds are different.
  if (!Ptr.block()->isAccessible()) {
    if (!CheckExtern(S, OpPC, Ptr))
      return false;
    if (!CheckLive(S, OpPC, Ptr, AK: AK_Construct))
      return false;
    return CheckDummy(S, OpPC, B: Ptr.block(), AK: AK_Construct);
  }
  if (!CheckTemporary(S, OpPC, B: Ptr.block(), AK: AK_Construct))
    return false;

  // CheckLifetime for this and all base pointers.
  for (Pointer P = Ptr;;) {
    if (!CheckLifetime(S, OpPC, LT: P.getLifetime(), AK: AK_Construct))
      return false;

    if (P.isRoot())
      break;
    P = P.getBase();
  }

  if (!CheckRange(S, OpPC, Ptr, AK: AK_Construct))
    return false;
  if (!CheckGlobal(S, OpPC, Ptr))
    return false;
  if (!CheckConst(S, OpPC, Ptr))
    return false;
  // Outside a constant context we refuse objects of unknown
  // constexpr-ness.
  if (!S.inConstantContext() && isConstexprUnknown(P: Ptr))
    return false;

  if (!InvalidNewDeleteExpr(S, OpPC, E))
    return false;

  // Compare the element type and element count of the existing storage
  // against the allocated type: the base element types must be similar and
  // the storage must have room for at least AllocSize elements.
  const auto *NewExpr = cast<CXXNewExpr>(Val: E);
  QualType StorageType = Ptr.getFieldDesc()->getDataType(Ctx: S.getASTContext());
  const ASTContext &ASTCtx = S.getASTContext();
  QualType AllocType;
  if (ArraySize) {
    AllocType = ASTCtx.getConstantArrayType(
        EltTy: NewExpr->getAllocatedType(),
        ArySize: APInt(64, static_cast<uint64_t>(*ArraySize), false), SizeExpr: nullptr,
        ASM: ArraySizeModifier::Normal, IndexTypeQuals: 0);
  } else {
    AllocType = NewExpr->getAllocatedType();
  }

  unsigned StorageSize = 1;
  unsigned AllocSize = 1;
  if (const auto *CAT = dyn_cast<ConstantArrayType>(Val&: AllocType))
    AllocSize = CAT->getZExtSize();
  if (const auto *CAT = dyn_cast<ConstantArrayType>(Val&: StorageType))
    StorageSize = CAT->getZExtSize();

  if (AllocSize > StorageSize ||
      !ASTCtx.hasSimilarType(T1: ASTCtx.getBaseElementType(QT: AllocType),
                             T2: ASTCtx.getBaseElementType(QT: StorageType))) {
    S.FFDiag(Loc: S.Current->getLocation(PC: OpPC),
             DiagId: diag::note_constexpr_placement_new_wrong_type)
        << StorageType << AllocType;
    return false;
  }

  // Can't activate fields in a union, unless the direct base is the union.
  if (Ptr.inUnion() && !Ptr.isActive() && !Ptr.getBase().getRecord()->isUnion())
    return CheckActive(S, OpPC, Ptr, AK: AK_Construct);

  return true;
}
2075
2076bool InvalidNewDeleteExpr(InterpState &S, CodePtr OpPC, const Expr *E) {
2077 assert(E);
2078
2079 if (const auto *NewExpr = dyn_cast<CXXNewExpr>(Val: E)) {
2080 const FunctionDecl *OperatorNew = NewExpr->getOperatorNew();
2081
2082 if (NewExpr->getNumPlacementArgs() > 0) {
2083 // This is allowed pre-C++26, but only an std function or if
2084 // [[msvc::constexpr]] was used.
2085 if (S.getLangOpts().CPlusPlus26 || S.Current->isStdFunction() ||
2086 S.Current->MSVCConstexprAllowed)
2087 return true;
2088
2089 S.FFDiag(SI: S.Current->getSource(PC: OpPC), DiagId: diag::note_constexpr_new_placement)
2090 << /*C++26 feature*/ 1 << E->getSourceRange();
2091 } else if (
2092 !OperatorNew
2093 ->isUsableAsGlobalAllocationFunctionInConstantEvaluation()) {
2094 S.FFDiag(SI: S.Current->getSource(PC: OpPC),
2095 DiagId: diag::note_constexpr_new_non_replaceable)
2096 << isa<CXXMethodDecl>(Val: OperatorNew) << OperatorNew;
2097 return false;
2098 } else if (!S.getLangOpts().CPlusPlus26 &&
2099 NewExpr->getNumPlacementArgs() == 1 &&
2100 !OperatorNew->isReservedGlobalPlacementOperator()) {
2101 if (!S.getLangOpts().CPlusPlus26) {
2102 S.FFDiag(SI: S.Current->getSource(PC: OpPC), DiagId: diag::note_constexpr_new_placement)
2103 << /*Unsupported*/ 0 << E->getSourceRange();
2104 return false;
2105 }
2106 return true;
2107 }
2108 } else {
2109 const auto *DeleteExpr = cast<CXXDeleteExpr>(Val: E);
2110 const FunctionDecl *OperatorDelete = DeleteExpr->getOperatorDelete();
2111 if (!OperatorDelete
2112 ->isUsableAsGlobalAllocationFunctionInConstantEvaluation()) {
2113 S.FFDiag(SI: S.Current->getSource(PC: OpPC),
2114 DiagId: diag::note_constexpr_new_non_replaceable)
2115 << isa<CXXMethodDecl>(Val: OperatorDelete) << OperatorDelete;
2116 return false;
2117 }
2118 }
2119
2120 return false;
2121}
2122
/// Diagnoses overflow of a fixed-point operation; FP is the computed
/// (wrapped) result value.
bool handleFixedPointOverflow(InterpState &S, CodePtr OpPC,
                              const FixedPoint &FP) {
  const Expr *E = S.Current->getExpr(PC: OpPC);
  // When only checking for undefined behavior, additionally report the
  // overflow as a compiler warning.
  if (S.checkingForUndefinedBehavior()) {
    S.getASTContext().getDiagnostics().Report(
        Loc: E->getExprLoc(), DiagID: diag::warn_fixedpoint_constant_overflow)
        << FP.toDiagnosticString(Ctx: S.getASTContext()) << E->getType();
  }
  S.CCEDiag(E, DiagId: diag::note_constexpr_overflow)
      << FP.toDiagnosticString(Ctx: S.getASTContext()) << E->getType();
  return S.noteUndefinedBehavior();
}
2135
2136bool InvalidShuffleVectorIndex(InterpState &S, CodePtr OpPC, uint32_t Index) {
2137 const SourceInfo &Loc = S.Current->getSource(PC: OpPC);
2138 S.FFDiag(SI: Loc,
2139 DiagId: diag::err_shufflevector_minus_one_is_undefined_behavior_constexpr)
2140 << Index;
2141 return false;
2142}
2143
2144bool CheckPointerToIntegralCast(InterpState &S, CodePtr OpPC,
2145 const Pointer &Ptr, unsigned BitWidth) {
2146 const SourceInfo &E = S.Current->getSource(PC: OpPC);
2147 S.CCEDiag(SI: E, DiagId: diag::note_constexpr_invalid_cast)
2148 << 2 << S.getLangOpts().CPlusPlus << S.Current->getRange(PC: OpPC);
2149
2150 if (Ptr.isDummy())
2151 return false;
2152 if (Ptr.isFunctionPointer())
2153 return true;
2154
2155 if (Ptr.isBlockPointer() && !Ptr.isZero()) {
2156 // Only allow based lvalue casts if they are lossless.
2157 if (S.getASTContext().getTargetInfo().getPointerWidth(AddrSpace: LangAS::Default) !=
2158 BitWidth)
2159 return Invalid(S, OpPC);
2160 }
2161 return true;
2162}
2163
2164bool CastPointerIntegralAP(InterpState &S, CodePtr OpPC, uint32_t BitWidth) {
2165 const Pointer &Ptr = S.Stk.pop<Pointer>();
2166
2167 if (!CheckPointerToIntegralCast(S, OpPC, Ptr, BitWidth))
2168 return false;
2169
2170 auto Result = S.allocAP<IntegralAP<false>>(BitWidth);
2171 Result.copy(V: APInt(BitWidth, Ptr.getIntegerRepresentation()));
2172
2173 S.Stk.push<IntegralAP<false>>(Args&: Result);
2174 return true;
2175}
2176
2177bool CastPointerIntegralAPS(InterpState &S, CodePtr OpPC, uint32_t BitWidth) {
2178 const Pointer &Ptr = S.Stk.pop<Pointer>();
2179
2180 if (!CheckPointerToIntegralCast(S, OpPC, Ptr, BitWidth))
2181 return false;
2182
2183 auto Result = S.allocAP<IntegralAP<true>>(BitWidth);
2184 Result.copy(V: APInt(BitWidth, Ptr.getIntegerRepresentation()));
2185
2186 S.Stk.push<IntegralAP<true>>(Args&: Result);
2187 return true;
2188}
2189
2190bool CheckBitCast(InterpState &S, CodePtr OpPC, bool HasIndeterminateBits,
2191 bool TargetIsUCharOrByte) {
2192 // This is always fine.
2193 if (!HasIndeterminateBits)
2194 return true;
2195
2196 // Indeterminate bits can only be bitcast to unsigned char or std::byte.
2197 if (TargetIsUCharOrByte)
2198 return true;
2199
2200 const Expr *E = S.Current->getExpr(PC: OpPC);
2201 QualType ExprType = E->getType();
2202 S.FFDiag(E, DiagId: diag::note_constexpr_bit_cast_indet_dest)
2203 << ExprType << S.getLangOpts().CharIsSigned << E->getSourceRange();
2204 return false;
2205}
2206
/// Pushes a typeid pointer for the statically known type TypePtr.
/// TypeInfoType is the type of the std::type_info object itself.
bool GetTypeid(InterpState &S, CodePtr OpPC, const Type *TypePtr,
               const Type *TypeInfoType) {
  S.Stk.push<Pointer>(Args&: TypePtr, Args&: TypeInfoType);
  return true;
}
2212
/// Implements typeid() on an object: pops the object pointer and pushes a
/// typeid pointer for its dynamic type.
bool GetTypeidPtr(InterpState &S, CodePtr OpPC, const Type *TypeInfoType) {
  const auto &P = S.Stk.pop<Pointer>();

  if (!P.isBlockPointer())
    return false;

  // Pick the most-derived type.
  CanQualType T = P.getDeclPtr().getType()->getCanonicalTypeUnqualified();
  // ... unless we're currently constructing this object.
  // FIXME: We have a similar check to this in more places.
  if (S.Current->getFunction()) {
    // Walk the call stack; if any active frame is a constructor or
    // destructor of this object's block, the dynamic type is that
    // frame's class instead.
    for (const InterpFrame *Frame = S.Current; Frame; Frame = Frame->Caller) {
      if (const Function *Func = Frame->getFunction();
          Func && (Func->isConstructor() || Func->isDestructor()) &&
          P.block() == Frame->getThis().block()) {
        T = S.getContext().getASTContext().getCanonicalTagType(
            TD: Func->getParentDecl());
        break;
      }
    }
  }

  S.Stk.push<Pointer>(Args: T->getTypePtr(), Args&: TypeInfoType);
  return true;
}
2238
/// Diagnoses a typeid() expression with a polymorphic operand that cannot
/// be evaluated in this mode.
bool DiagTypeid(InterpState &S, CodePtr OpPC) {
  const auto *E = cast<CXXTypeidExpr>(Val: S.Current->getExpr(PC: OpPC));
  S.CCEDiag(E, DiagId: diag::note_constexpr_typeid_polymorphic)
      << E->getExprOperand()->getType()
      << E->getExprOperand()->getSourceRange();
  return false;
}
2246
/// Returns true if LHS and RHS (pointers into string literal arrays) could
/// designate overlapping storage: after aligning the two views at the
/// pointers' offsets, the shorter literal (including its null terminator)
/// must match the longer one.
bool arePotentiallyOverlappingStringLiterals(const Pointer &LHS,
                                             const Pointer &RHS) {
  // Offsets of the pointers within their arrays; one-past-end counts as
  // the full element count.
  unsigned LHSOffset = LHS.isOnePastEnd() ? LHS.getNumElems() : LHS.getIndex();
  unsigned RHSOffset = RHS.isOnePastEnd() ? RHS.getNumElems() : RHS.getIndex();
  // Byte lengths of the character data, excluding the null terminator.
  unsigned LHSLength = (LHS.getNumElems() - 1) * LHS.elemSize();
  unsigned RHSLength = (RHS.getNumElems() - 1) * RHS.elemSize();

  StringRef LHSStr((const char *)LHS.atIndex(Idx: 0).getRawAddress(), LHSLength);
  StringRef RHSStr((const char *)RHS.atIndex(Idx: 0).getRawAddress(), RHSLength);
  // Align both views at the same logical position; if the offset
  // difference exceeds the string's length, they can't overlap.
  int32_t IndexDiff = RHSOffset - LHSOffset;
  if (IndexDiff < 0) {
    if (static_cast<int32_t>(LHSLength) < -IndexDiff)
      return false;
    LHSStr = LHSStr.drop_front(N: -IndexDiff);
  } else {
    if (static_cast<int32_t>(RHSLength) < IndexDiff)
      return false;
    RHSStr = RHSStr.drop_front(N: IndexDiff);
  }

  unsigned ShorterCharWidth;
  StringRef Shorter;
  StringRef Longer;
  if (LHSLength < RHSLength) {
    ShorterCharWidth = LHS.elemSize();
    Shorter = LHSStr;
    Longer = RHSStr;
  } else {
    ShorterCharWidth = RHS.elemSize();
    Shorter = RHSStr;
    Longer = LHSStr;
  }

  // The null terminator isn't included in the string data, so check for it
  // manually. If the longer string doesn't have a null terminator where the
  // shorter string ends, they aren't potentially overlapping.
  for (unsigned NullByte : llvm::seq(Size: ShorterCharWidth)) {
    if (Shorter.size() + NullByte >= Longer.size())
      break;
    if (Longer[Shorter.size() + NullByte])
      return false;
  }
  return Shorter == Longer.take_front(N: Shorter.size());
}
2291
2292static void copyPrimitiveMemory(InterpState &S, const Pointer &Ptr,
2293 PrimType T) {
2294 if (T == PT_IntAPS) {
2295 auto &Val = Ptr.deref<IntegralAP<true>>();
2296 if (!Val.singleWord()) {
2297 uint64_t *NewMemory = new (S.P) uint64_t[Val.numWords()];
2298 Val.take(NewMemory);
2299 }
2300 } else if (T == PT_IntAP) {
2301 auto &Val = Ptr.deref<IntegralAP<false>>();
2302 if (!Val.singleWord()) {
2303 uint64_t *NewMemory = new (S.P) uint64_t[Val.numWords()];
2304 Val.take(NewMemory);
2305 }
2306 } else if (T == PT_Float) {
2307 auto &Val = Ptr.deref<Floating>();
2308 if (!Val.singleWord()) {
2309 uint64_t *NewMemory = new (S.P) uint64_t[Val.numWords()];
2310 Val.take(NewMemory);
2311 }
2312 } else if (T == PT_MemberPtr) {
2313 auto &Val = Ptr.deref<MemberPointer>();
2314 unsigned PathLength = Val.getPathLength();
2315 auto *NewPath = new (S.P) const CXXRecordDecl *[PathLength];
2316 std::copy_n(first: Val.path(), n: PathLength, result: NewPath);
2317 Val.takePath(NewPath);
2318 }
2319}
2320
2321template <typename T>
2322static void copyPrimitiveMemory(InterpState &S, const Pointer &Ptr) {
2323 assert(needsAlloc<T>());
2324 if constexpr (std::is_same_v<T, MemberPointer>) {
2325 auto &Val = Ptr.deref<MemberPointer>();
2326 unsigned PathLength = Val.getPathLength();
2327 auto *NewPath = new (S.P) const CXXRecordDecl *[PathLength];
2328 std::copy_n(first: Val.path(), n: PathLength, result: NewPath);
2329 Val.takePath(NewPath);
2330 } else {
2331 auto &Val = Ptr.deref<T>();
2332 if (!Val.singleWord()) {
2333 uint64_t *NewMemory = new (S.P) uint64_t[Val.numWords()];
2334 Val.take(NewMemory);
2335 }
2336 }
2337}
2338
/// Recursively moves all heap-allocated primitive payloads reachable from
/// Ptr into Program-owned memory, so the values remain valid after
/// interpretation finishes.
static void finishGlobalRecurse(InterpState &S, const Pointer &Ptr) {
  if (const Record *R = Ptr.getRecord()) {
    for (const Record::Field &Fi : R->fields()) {
      if (Fi.Desc->isPrimitive()) {
        TYPE_SWITCH_ALLOC(Fi.Desc->getPrimType(), {
          copyPrimitiveMemory<T>(S, Ptr.atField(Fi.Offset));
        });
      } else {
        finishGlobalRecurse(S, Ptr: Ptr.atField(Off: Fi.Offset));
      }
    }
    return;
  }

  if (const Descriptor *D = Ptr.getFieldDesc(); D && D->isArray()) {
    unsigned NumElems = D->getNumElems();
    if (NumElems == 0)
      return;

    if (D->isPrimitiveArray()) {
      PrimType PT = D->getPrimType();
      // Only element types that may heap-allocate need fixing up.
      if (!needsAlloc(T: PT))
        return;
      assert(NumElems >= 1);
      // Fast path: if the first element fits into a single word, skip the
      // per-element copies below and treat the whole array as done.
      const Pointer EP = Ptr.atIndex(Idx: 0);
      bool AllSingleWord = true;
      TYPE_SWITCH_ALLOC(PT, {
        if (!EP.deref<T>().singleWord()) {
          copyPrimitiveMemory<T>(S, EP);
          AllSingleWord = false;
        }
      });
      if (AllSingleWord)
        return;
      // Copy the remaining elements (element 0 was handled above).
      for (unsigned I = 1; I != D->getNumElems(); ++I) {
        const Pointer EP = Ptr.atIndex(Idx: I);
        copyPrimitiveMemory(S, Ptr: EP, T: PT);
      }
    } else {
      assert(D->isCompositeArray());
      for (unsigned I = 0; I != D->getNumElems(); ++I) {
        const Pointer EP = Ptr.atIndex(Idx: I).narrow();
        finishGlobalRecurse(S, Ptr: EP);
      }
    }
  }
}
2386
2387bool FinishInitGlobal(InterpState &S, CodePtr OpPC) {
2388 const Pointer &Ptr = S.Stk.pop<Pointer>();
2389
2390 finishGlobalRecurse(S, Ptr);
2391 if (Ptr.canBeInitialized()) {
2392 Ptr.initialize();
2393 Ptr.activate();
2394 }
2395
2396 return true;
2397}
2398
/// Diagnoses an invalid cast of the given kind. The return value controls
/// whether evaluation may continue (e.g. for constant folding): non-fatal
/// reinterpret-style casts and dynamic_cast continue, volatile never does.
bool InvalidCast(InterpState &S, CodePtr OpPC, CastKind Kind, bool Fatal) {
  const SourceLocation &Loc = S.Current->getLocation(PC: OpPC);

  switch (Kind) {
  case CastKind::Reinterpret:
    S.CCEDiag(Loc, DiagId: diag::note_constexpr_invalid_cast)
        << diag::ConstexprInvalidCastKind::Reinterpret
        << S.Current->getRange(PC: OpPC);
    return !Fatal;
  case CastKind::ReinterpretLike:
    S.CCEDiag(Loc, DiagId: diag::note_constexpr_invalid_cast)
        << diag::ConstexprInvalidCastKind::ThisConversionOrReinterpret
        << S.getLangOpts().CPlusPlus << S.Current->getRange(PC: OpPC);
    return !Fatal;
  case CastKind::Volatile:
    // A volatile access is a hard error, but only diagnosed when we're
    // not merely checking for a potential constant expression.
    if (!S.checkingPotentialConstantExpression()) {
      const auto *E = cast<CastExpr>(Val: S.Current->getExpr(PC: OpPC));
      if (S.getLangOpts().CPlusPlus)
        S.FFDiag(E, DiagId: diag::note_constexpr_access_volatile_type)
            << AK_Read << E->getSubExpr()->getType();
      else
        S.FFDiag(E);
    }

    return false;
  case CastKind::Dynamic:
    // Only reached pre-C++20; the cast itself can still be folded.
    assert(!S.getLangOpts().CPlusPlus20);
    S.CCEDiag(Loc, DiagId: diag::note_constexpr_invalid_cast)
        << diag::ConstexprInvalidCastKind::Dynamic;
    return true;
  }
  llvm_unreachable("Unhandled CastKind");
  return false;
}
2433
/// Destroys the local variables of scope I in the current frame, rejecting
/// the destruction of any local whose lifetime has already ended.
bool Destroy(InterpState &S, CodePtr OpPC, uint32_t I) {
  assert(S.Current->getFunction());
  // FIXME: We iterate the scope once here and then again in the destroy() call
  // below.
  for (auto &Local : S.Current->getFunction()->getScope(Idx: I).locals_reverse()) {
    // Uninitialized locals have nothing to destroy.
    if (!S.Current->getLocalBlock(Offset: Local.Offset)->isInitialized())
      continue;
    const Pointer &Ptr = S.Current->getLocalPointer(Offset: Local.Offset);
    if (Ptr.getLifetime() == Lifetime::Ended) {
      // Try to use the declaration for better diagnostics
      if (const Decl *D = Ptr.getDeclDesc()->asDecl()) {
        auto *ND = cast<NamedDecl>(Val: D);
        S.FFDiag(Loc: ND->getLocation(),
                 DiagId: diag::note_constexpr_destroy_out_of_lifetime)
            << ND->getNameAsString();
      } else {
        S.FFDiag(Loc: Ptr.getDeclDesc()->getLocation(),
                 DiagId: diag::note_constexpr_destroy_out_of_lifetime)
            << Ptr.toDiagnosticString(Ctx: S.getASTContext());
      }
      return false;
    }
  }

  S.Current->destroy(Idx: I);
  return true;
}
2461
// Perform a cast towards the class of the Decl (either up or down the
// hierarchy).
/// Pops the innermost path entry of MemberPtr, verifying that the class we
/// end up in matches BaseDecl; pushes the adjusted member pointer.
static bool castBackMemberPointer(InterpState &S,
                                  const MemberPointer &MemberPtr,
                                  int32_t BaseOffset,
                                  const RecordDecl *BaseDecl) {
  // The class we land in after removing one path entry.
  const CXXRecordDecl *Expected;
  if (MemberPtr.getPathLength() >= 2)
    Expected = MemberPtr.getPathEntry(Index: MemberPtr.getPathLength() - 2);
  else
    Expected = MemberPtr.getRecordDecl();

  assert(Expected);
  if (Expected->getCanonicalDecl() != BaseDecl->getCanonicalDecl()) {
    // C++11 [expr.static.cast]p12: In a conversion from (D::*) to (B::*),
    // if B does not contain the original member and is not a base or
    // derived class of the class containing the original member, the result
    // of the cast is undefined.
    // C++11 [conv.mem]p2 does not cover this case for a cast from (B::*) to
    // (D::*). We consider that to be a language defect.
    return false;
  }

  unsigned OldPathLength = MemberPtr.getPathLength();
  unsigned NewPathLength = OldPathLength - 1;
  // A member pointer with an empty path is not a derived member.
  bool IsDerivedMember = NewPathLength != 0;
  auto NewPath = S.allocMemberPointerPath(Length: NewPathLength);
  std::copy_n(first: MemberPtr.path(), n: NewPathLength, result: NewPath);

  S.Stk.push<MemberPointer>(Args: MemberPtr.atInstanceBase(Offset: BaseOffset, PathLength: NewPathLength,
                                                    Path: NewPath, NewIsDerived: IsDerivedMember));
  return true;
}
2495
2496static bool appendToMemberPointer(InterpState &S,
2497 const MemberPointer &MemberPtr,
2498 int32_t BaseOffset,
2499 const RecordDecl *BaseDecl,
2500 bool IsDerivedMember) {
2501 unsigned OldPathLength = MemberPtr.getPathLength();
2502 unsigned NewPathLength = OldPathLength + 1;
2503
2504 auto NewPath = S.allocMemberPointerPath(Length: NewPathLength);
2505 std::copy_n(first: MemberPtr.path(), n: OldPathLength, result: NewPath);
2506 NewPath[OldPathLength] = cast<CXXRecordDecl>(Val: BaseDecl);
2507
2508 S.Stk.push<MemberPointer>(Args: MemberPtr.atInstanceBase(Offset: BaseOffset, PathLength: NewPathLength,
2509 Path: NewPath, NewIsDerived: IsDerivedMember));
2510 return true;
2511}
2512
2513/// DerivedToBaseMemberPointer
2514bool CastMemberPtrBasePop(InterpState &S, CodePtr OpPC, int32_t Off,
2515 const RecordDecl *BaseDecl) {
2516 const auto &Ptr = S.Stk.pop<MemberPointer>();
2517
2518 if (!Ptr.isDerivedMember() && Ptr.hasPath())
2519 return castBackMemberPointer(S, MemberPtr: Ptr, BaseOffset: Off, BaseDecl);
2520
2521 bool IsDerivedMember = Ptr.isDerivedMember() || !Ptr.hasPath();
2522 return appendToMemberPointer(S, MemberPtr: Ptr, BaseOffset: Off, BaseDecl, IsDerivedMember);
2523}
2524
2525/// BaseToDerivedMemberPointer
2526bool CastMemberPtrDerivedPop(InterpState &S, CodePtr OpPC, int32_t Off,
2527 const RecordDecl *BaseDecl) {
2528 const auto &Ptr = S.Stk.pop<MemberPointer>();
2529
2530 if (!Ptr.isDerivedMember()) {
2531 // Simply append.
2532 return appendToMemberPointer(S, MemberPtr: Ptr, BaseOffset: Off, BaseDecl,
2533 /*IsDerivedMember=*/false);
2534 }
2535
2536 return castBackMemberPointer(S, MemberPtr: Ptr, BaseOffset: Off, BaseDecl);
2537}
2538
2539// FIXME: Would be nice to generate this instead of hardcoding it here.
2540constexpr bool OpReturns(Opcode Op) {
2541 return Op == OP_RetVoid || Op == OP_RetValue || Op == OP_NoRet ||
2542 Op == OP_RetSint8 || Op == OP_RetUint8 || Op == OP_RetSint16 ||
2543 Op == OP_RetUint16 || Op == OP_RetSint32 || Op == OP_RetUint32 ||
2544 Op == OP_RetSint64 || Op == OP_RetUint64 || Op == OP_RetIntAP ||
2545 Op == OP_RetIntAPS || Op == OP_RetBool || Op == OP_RetFixedPoint ||
2546 Op == OP_RetPtr || Op == OP_RetMemberPtr || Op == OP_RetFloat ||
2547 Op == OP_EndSpeculation;
2548}
2549
2550#if USE_TAILCALLS
2551PRESERVE_NONE static bool InterpNext(InterpState &S, CodePtr &PC);
2552#endif
2553
2554// The dispatcher functions read the opcode arguments from the
2555// bytecode and call the implementation function.
2556#define GET_INTERPFN_DISPATCHERS
2557#include "Opcodes.inc"
2558#undef GET_INTERPFN_DISPATCHERS
2559
2560using InterpFn = bool (*)(InterpState &, CodePtr &PC) PRESERVE_NONE;
2561// Array of the dispatcher functions defined above.
2562const InterpFn InterpFunctions[] = {
2563#define GET_INTERPFN_LIST
2564#include "Opcodes.inc"
2565#undef GET_INTERPFN_LIST
2566};
2567
#if USE_TAILCALLS
// Read the next opcode and call the dispatcher function.
// The MUSTTAIL keeps the opcode chain as guaranteed tail calls, so the
// native stack does not grow with the number of executed opcodes.
PRESERVE_NONE static bool InterpNext(InterpState &S, CodePtr &PC) {
  auto Op = PC.read<Opcode>();
  auto Fn = InterpFunctions[Op];
  MUSTTAIL return Fn(S, PC);
}
#endif
2576
/// Main entry point: interprets bytecode starting at the current frame's
/// PC until a frame-terminating opcode is executed or an opcode fails.
bool Interpret(InterpState &S) {
  // The current stack frame when we started Interpret().
  // This is being used by the ops to determine whether
  // to return from this function and thus terminate
  // interpretation.
  assert(!S.Current->isRoot());
  CodePtr PC = S.Current->getPC();

#if USE_TAILCALLS
  return InterpNext(S, PC);
#else
  // Without guaranteed tail calls, use a classic dispatch loop instead.
  while (true) {
    auto Op = PC.read<Opcode>();
    auto Fn = InterpFunctions[Op];

    if (!Fn(S, PC))
      return false;
    // Stop once a frame-terminating opcode has executed.
    if (OpReturns(Op))
      break;
  }
  return true;
#endif
}
2600
/// This is used to implement speculative execution via __builtin_constant_p
/// when we generate bytecode.
///
/// The setup here is that we use the same tailcall mechanism for speculative
/// evaluation that we use for the regular one.
/// Since each speculative execution ends with an EndSpeculation opcode,
/// that one does NOT call InterpNext() but simply returns true.
/// This way, we return back to this function when we see an EndSpeculation,
/// OR (of course), when we encounter an error and one of the opcodes
/// returns false.
PRESERVE_NONE static bool BCP(InterpState &S, CodePtr &RealPC, int32_t Offset,
                              PrimType PT) {
  [[maybe_unused]] CodePtr PCBefore = RealPC;
  size_t StackSizeBefore = S.Stk.size();

  // Speculation depth must be at least 1 here, since we must have
  // passed a StartSpeculation op before.
#ifndef NDEBUG
  [[maybe_unused]] unsigned DepthBefore = S.SpeculationDepth;
  assert(DepthBefore >= 1);
#endif

  // Interpret with a local copy of the PC so RealPC stays untouched.
  CodePtr PC = RealPC;
  auto SpeculativeInterp = [&S, &PC]() -> bool {
    // Ignore diagnostics during speculative execution.
    PushIgnoreDiags(S, OpPC: PC);
    auto _ = llvm::scope_exit([&]() { PopIgnoreDiags(S, OpPC: PC); });

#if USE_TAILCALLS
    auto Op = PC.read<Opcode>();
    auto Fn = InterpFunctions[Op];
    return Fn(S, PC);
#else
    while (true) {
      auto Op = PC.read<Opcode>();
      auto Fn = InterpFunctions[Op];

      if (!Fn(S, PC))
        return false;
      if (OpReturns(Op))
        break;
    }
    return true;
#endif
  };

  if (SpeculativeInterp()) {
    // Speculation must've ended naturally via a EndSpeculation opcode.
    assert(S.SpeculationDepth == DepthBefore - 1);
    if (PT == PT_Ptr) {
      // Pointer results need an extra check to decide whether they count
      // as constant for __builtin_constant_p.
      const auto &Ptr = S.Stk.pop<Pointer>();
      assert(S.Stk.size() == StackSizeBefore);
      S.Stk.push<Integral<32, true>>(
          Args: Integral<32, true>::from(Value: CheckBCPResult(S, Ptr)));
    } else {
      // Pop the result from the stack and return success.
      TYPE_SWITCH(PT, S.Stk.discard<T>(););
      assert(S.Stk.size() == StackSizeBefore);
      S.Stk.push<Integral<32, true>>(Args: Integral<32, true>::from(Value: 1));
    }
  } else {
    // End the speculation manually since we didn't call EndSpeculation
    // naturally.
    EndSpeculation(S, OpPC&: RealPC);

    if (!S.inConstantContext())
      return Invalid(S, OpPC: RealPC);

    S.Stk.clearTo(NewSize: StackSizeBefore);
    S.Stk.push<Integral<32, true>>(Args: Integral<32, true>::from(Value: 0));
  }

  // RealPC should not have been modified.
  assert(*RealPC == *PCBefore);

  // We have already evaluated this speculation's EndSpeculation opcode.
  assert(S.SpeculationDepth == DepthBefore - 1);

  // Jump to the end label. This is a little trickier than just
  // RealPC += Offset because our usual jump instructions don't have any
  // arguments, so the offset we get is a little too much and we need to
  // subtract the size of the bool and PrimType arguments again.
  int32_t ParamSize = align(Size: sizeof(PrimType));
  assert(Offset >= ParamSize);
  RealPC += Offset - ParamSize;

  return true;
}
2689
2690} // namespace interp
2691} // namespace clang
2692