1//===------- Interp.cpp - Interpreter for the constexpr VM ------*- C++ -*-===//
2//
3// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
4// See https://llvm.org/LICENSE.txt for license information.
5// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
6//
7//===----------------------------------------------------------------------===//
8
9#include "Interp.h"
10#include "Compiler.h"
11#include "Function.h"
12#include "InterpFrame.h"
13#include "InterpShared.h"
14#include "InterpStack.h"
15#include "Opcode.h"
16#include "PrimType.h"
17#include "Program.h"
18#include "State.h"
19#include "clang/AST/ASTContext.h"
20#include "clang/AST/CXXInheritance.h"
21#include "clang/AST/DeclObjC.h"
22#include "clang/AST/Expr.h"
23#include "clang/AST/ExprCXX.h"
24#include "clang/Basic/DiagnosticSema.h"
25#include "clang/Basic/TargetInfo.h"
26#include "llvm/ADT/StringExtras.h"
27
28using namespace clang;
29using namespace clang::interp;
30
/// Handler for returning a value in a context that cannot produce one.
/// Reaching this is an interpreter bug, so abort hard.
static bool RetValue(InterpState &S, CodePtr &Pt) {
  llvm::report_fatal_error(reason: "Interpreter cannot return values");
}
34
35//===----------------------------------------------------------------------===//
36// Jmp, Jt, Jf
37//===----------------------------------------------------------------------===//
38
/// Unconditional jump: advance PC by Offset, then run the per-step
/// bookkeeping hook (noteStep), whose result decides whether to continue.
static bool Jmp(InterpState &S, CodePtr &PC, int32_t Offset) {
  PC += Offset;
  return S.noteStep(OpPC: PC);
}
43
44static bool Jt(InterpState &S, CodePtr &PC, int32_t Offset) {
45 if (S.Stk.pop<bool>()) {
46 PC += Offset;
47 }
48 return S.noteStep(OpPC: PC);
49}
50
51static bool Jf(InterpState &S, CodePtr &PC, int32_t Offset) {
52 if (!S.Stk.pop<bool>()) {
53 PC += Offset;
54 }
55 return S.noteStep(OpPC: PC);
56}
57
58// https://github.com/llvm/llvm-project/issues/102513
59#if defined(_MSC_VER) && !defined(__clang__) && !defined(NDEBUG)
60#pragma optimize("", off)
61#endif
62// FIXME: We have the large switch over all opcodes here again, and in
63// Interpret().
/// Implements the speculative evaluation used for __builtin_constant_p:
/// interprets the opcodes between this one and the matching
/// OP_EndSpeculation with a private PC. On success, the speculated result
/// is popped and replaced by an i32 1 (for pointers, by the result of
/// CheckBCPResult); on failure the stack is rolled back to its previous
/// size and an i32 0 is pushed instead. Either way RealPC is advanced past
/// the speculated region.
static bool BCP(InterpState &S, CodePtr &RealPC, int32_t Offset, PrimType PT) {
  [[maybe_unused]] CodePtr PCBefore = RealPC;
  size_t StackSizeBefore = S.Stk.size();

  // Run the speculated opcodes on a local PC copy until we hit
  // OP_EndSpeculation; bail out (false) as soon as any opcode fails.
  auto SpeculativeInterp = [&S, RealPC]() -> bool {
    const InterpFrame *StartFrame = S.Current;
    CodePtr PC = RealPC;

    for (;;) {
      auto Op = PC.read<Opcode>();
      if (Op == OP_EndSpeculation)
        return true;
      CodePtr OpPC = PC;

      switch (Op) {
#define GET_INTERP
#include "Opcodes.inc"
#undef GET_INTERP
      }
    }
    llvm_unreachable("We didn't see an EndSpeculation op?");
  };

  if (SpeculativeInterp()) {
    if (PT == PT_Ptr) {
      // Pointer results need an extra check to decide whether they count
      // as "constant" for __builtin_constant_p purposes.
      const auto &Ptr = S.Stk.pop<Pointer>();
      assert(S.Stk.size() == StackSizeBefore);
      S.Stk.push<Integral<32, true>>(
          Args: Integral<32, true>::from(Value: CheckBCPResult(S, Ptr)));
    } else {
      // Pop the result from the stack and return success.
      TYPE_SWITCH(PT, S.Stk.pop<T>(););
      assert(S.Stk.size() == StackSizeBefore);
      S.Stk.push<Integral<32, true>>(Args: Integral<32, true>::from(Value: 1));
    }
  } else {
    // Outside of a constant context, a failed speculation is a hard error.
    if (!S.inConstantContext())
      return Invalid(S, OpPC: RealPC);

    // Roll back anything the failed speculation left on the stack.
    S.Stk.clearTo(NewSize: StackSizeBefore);
    S.Stk.push<Integral<32, true>>(Args: Integral<32, true>::from(Value: 0));
  }

  // RealPC should not have been modified.
  assert(*RealPC == *PCBefore);

  // Jump to the end label. This is a little trickier than just
  // RealPC += Offset, because our usual jump instructions don't have any
  // arguments, so the offset we get is a little too much and we need to
  // subtract the size of the bool and PrimType arguments again.
  int32_t ParamSize = align(Size: sizeof(PrimType));
  assert(Offset >= ParamSize);
  RealPC += Offset - ParamSize;

  [[maybe_unused]] CodePtr PCCopy = RealPC;
  assert(PCCopy.read<Opcode>() == OP_EndSpeculation);

  return true;
}
123// https://github.com/llvm/llvm-project/issues/102513
124#if defined(_MSC_VER) && !defined(__clang__) && !defined(NDEBUG)
125#pragma optimize("", on)
126#endif
127
128static void diagnoseMissingInitializer(InterpState &S, CodePtr OpPC,
129 const ValueDecl *VD) {
130 const SourceInfo &E = S.Current->getSource(PC: OpPC);
131 S.FFDiag(SI: E, DiagId: diag::note_constexpr_var_init_unknown, ExtraNotes: 1) << VD;
132 S.Note(Loc: VD->getLocation(), DiagId: diag::note_declared_at) << VD->getSourceRange();
133}
134
static void diagnoseNonConstVariable(InterpState &S, CodePtr OpPC,
                                     const ValueDecl *VD);

/// Diagnose a read of a declaration whose value is unknown to the
/// interpreter (a function parameter outside a call, or a variable without
/// a usable initializer). Always returns false; the interesting part is
/// which note gets emitted, which depends on the language mode.
static bool diagnoseUnknownDecl(InterpState &S, CodePtr OpPC,
                                const ValueDecl *D) {
  // This function tries pretty hard to produce a good diagnostic. Just skip
  // that if nobody will see it anyway.
  if (!S.diagnosing())
    return false;

  if (isa<ParmVarDecl>(Val: D)) {
    // Reference parameters in pre-C++11 constant contexts get the generic
    // non-const-variable diagnostic instead of the parameter-specific one.
    if (D->getType()->isReferenceType()) {
      if (S.inConstantContext() && S.getLangOpts().CPlusPlus &&
          !S.getLangOpts().CPlusPlus11) {
        diagnoseNonConstVariable(S, OpPC, VD: D);
        return false;
      }
    }

    const SourceInfo &Loc = S.Current->getSource(PC: OpPC);
    if (S.getLangOpts().CPlusPlus23 && D->getType()->isReferenceType()) {
      S.FFDiag(SI: Loc, DiagId: diag::note_constexpr_access_unknown_variable, ExtraNotes: 1)
          << AK_Read << D;
      S.Note(Loc: D->getLocation(), DiagId: diag::note_declared_at) << D->getSourceRange();
    } else if (S.getLangOpts().CPlusPlus11) {
      S.FFDiag(SI: Loc, DiagId: diag::note_constexpr_function_param_value_unknown, ExtraNotes: 1) << D;
      S.Note(Loc: D->getLocation(), DiagId: diag::note_declared_at) << D->getSourceRange();
    } else {
      // No dedicated note in C/C++98 mode; fall back to the generic one.
      S.FFDiag(SI: Loc);
    }
    return false;
  }

  // Non-parameter declarations: distinguish "not const", "no initializer",
  // and "initializer is not a constant expression".
  if (!D->getType().isConstQualified()) {
    diagnoseNonConstVariable(S, OpPC, VD: D);
  } else if (const auto *VD = dyn_cast<VarDecl>(Val: D)) {
    if (!VD->getAnyInitializer()) {
      diagnoseMissingInitializer(S, OpPC, VD);
    } else {
      const SourceInfo &Loc = S.Current->getSource(PC: OpPC);
      S.FFDiag(SI: Loc, DiagId: diag::note_constexpr_var_init_non_constant, ExtraNotes: 1) << VD;
      S.Note(Loc: VD->getLocation(), DiagId: diag::note_declared_at);
    }
  }

  return false;
}
181
/// Emit the "read of non-const variable" family of notes for \p VD.
/// The exact note depends on language mode and on the variable's type,
/// chosen to match the diagnostic output of the current interpreter.
static void diagnoseNonConstVariable(InterpState &S, CodePtr OpPC,
                                     const ValueDecl *VD) {
  if (!S.diagnosing())
    return;

  const SourceInfo &Loc = S.Current->getSource(PC: OpPC);
  // C has no dedicated note for this; emit the generic diagnostic.
  if (!S.getLangOpts().CPlusPlus) {
    S.FFDiag(SI: Loc);
    return;
  }

  // A const variable without any initializer is reported as "missing
  // initializer" rather than "non-const".
  if (const auto *VarD = dyn_cast<VarDecl>(Val: VD);
      VarD && VarD->getType().isConstQualified() &&
      !VarD->getAnyInitializer()) {
    diagnoseMissingInitializer(S, OpPC, VD);
    return;
  }

  // Rather random, but this is to match the diagnostic output of the current
  // interpreter.
  if (isa<ObjCIvarDecl>(Val: VD))
    return;

  if (VD->getType()->isIntegralOrEnumerationType()) {
    S.FFDiag(SI: Loc, DiagId: diag::note_constexpr_ltor_non_const_int, ExtraNotes: 1) << VD;
    S.Note(Loc: VD->getLocation(), DiagId: diag::note_declared_at);
    return;
  }

  S.FFDiag(SI: Loc,
           DiagId: S.getLangOpts().CPlusPlus11 ? diag::note_constexpr_ltor_non_constexpr
                                             : diag::note_constexpr_ltor_non_integral,
           ExtraNotes: 1)
      << VD << VD->getType();
  S.Note(Loc: VD->getLocation(), DiagId: diag::note_declared_at);
}
218
/// Check access to a lifetime-extended static temporary. A temporary
/// created by an earlier evaluation (different EvalID) may only be accessed
/// if its MaterializeTemporaryExpr is usable in constant expressions.
static bool CheckTemporary(InterpState &S, CodePtr OpPC, const Block *B,
                           AccessKinds AK) {
  if (B->getDeclID()) {
    // Only static temporaries are of interest here.
    if (!(B->isStatic() && B->isTemporary()))
      return true;

    const auto *MTE = dyn_cast_if_present<MaterializeTemporaryExpr>(
        Val: B->getDescriptor()->asExpr());
    if (!MTE)
      return true;

    // FIXME(perf): Since we do this check on every Load from a static
    // temporary, it might make sense to cache the value of the
    // isUsableInConstantExpressions call.
    if (B->getEvalID() != S.Ctx.getEvalID() &&
        !MTE->isUsableInConstantExpressions(Context: S.getASTContext())) {
      const SourceInfo &E = S.Current->getSource(PC: OpPC);
      S.FFDiag(SI: E, DiagId: diag::note_constexpr_access_static_temporary, ExtraNotes: 1) << AK;
      S.Note(Loc: B->getDescriptor()->getLocation(),
             DiagId: diag::note_constexpr_temporary_here);
      return false;
    }
  }
  return true;
}
244
245static bool CheckGlobal(InterpState &S, CodePtr OpPC, const Pointer &Ptr) {
246 if (auto ID = Ptr.getDeclID()) {
247 if (!Ptr.isStatic())
248 return true;
249
250 if (S.P.getCurrentDecl() == ID)
251 return true;
252
253 S.FFDiag(Loc: S.Current->getLocation(PC: OpPC), DiagId: diag::note_constexpr_modify_global);
254 return false;
255 }
256 return true;
257}
258
259namespace clang {
260namespace interp {
/// Discard the stack slot holding call argument \p Arg, based on its
/// classified primitive type (anything non-primitive is a Pointer).
static void popArg(InterpState &S, const Expr *Arg) {
  PrimType Ty = S.getContext().classify(E: Arg).value_or(PT: PT_Ptr);
  TYPE_SWITCH(Ty, S.Stk.discard<T>());
}
265
/// Pop the arguments of the call that just returned off the stack.
/// For variadic functions this needs the call expression (found at the
/// return PC in the caller) to determine how many variadic arguments were
/// passed; the fixed parameters are described by \p Func itself.
void cleanupAfterFunctionCall(InterpState &S, CodePtr OpPC,
                              const Function *Func) {
  assert(S.Current);
  assert(Func);

  if (S.Current->Caller && Func->isVariadic()) {
    // The CallExpr we're looking for is at the return PC of the current
    // function, i.e. in the caller.
    // This code path should be executed very rarely.
    unsigned NumVarArgs;
    const Expr *const *Args = nullptr;
    unsigned NumArgs = 0;
    const Expr *CallSite = S.Current->Caller->getExpr(PC: S.Current->getRetPC());
    if (const auto *CE = dyn_cast<CallExpr>(Val: CallSite)) {
      Args = CE->getArgs();
      NumArgs = CE->getNumArgs();
    } else if (const auto *CE = dyn_cast<CXXConstructExpr>(Val: CallSite)) {
      Args = CE->getArgs();
      NumArgs = CE->getNumArgs();
    } else
      assert(false && "Can't get arguments from that expression type");

    assert(NumArgs >= Func->getNumWrittenParams());
    // NOTE(review): operator calls appear to carry one extra written argument
    // (the object argument), hence the isa<CXXOperatorCallExpr> correction —
    // confirm against Function::getNumWrittenParams().
    NumVarArgs = NumArgs - (Func->getNumWrittenParams() +
                            isa<CXXOperatorCallExpr>(Val: CallSite));
    // Pop the variadic arguments in reverse order of pushing.
    for (unsigned I = 0; I != NumVarArgs; ++I) {
      const Expr *A = Args[NumArgs - 1 - I];
      popArg(S, Arg: A);
    }
  }

  // And in any case, remove the fixed parameters (the non-variadic ones)
  // at the end.
  for (const Function::ParamDescriptor &PDesc : Func->args_reverse())
    TYPE_SWITCH(PDesc.T, S.Stk.discard<T>());
}
302
303bool isConstexprUnknown(const Pointer &P) {
304 if (!P.isBlockPointer())
305 return false;
306
307 if (P.isDummy())
308 return isa_and_nonnull<ParmVarDecl>(Val: P.getDeclDesc()->asValueDecl());
309
310 return P.getDeclDesc()->IsConstexprUnknown;
311}
312
313bool CheckBCPResult(InterpState &S, const Pointer &Ptr) {
314 if (Ptr.isDummy())
315 return false;
316 if (Ptr.isZero())
317 return true;
318 if (Ptr.isFunctionPointer())
319 return false;
320 if (Ptr.isIntegralPointer())
321 return true;
322 if (Ptr.isTypeidPointer())
323 return true;
324
325 if (Ptr.getType()->isAnyComplexType())
326 return true;
327
328 if (const Expr *Base = Ptr.getDeclDesc()->asExpr())
329 return isa<StringLiteral>(Val: Base) && Ptr.getIndex() == 0;
330 return false;
331}
332
/// Check that \p Ptr designates the active member of every union on its
/// access path; otherwise emit the "access of inactive union member" note,
/// naming both the inactive field and (if any) the currently active one.
/// \p WillActivate is set for stores that will activate the member; those
/// are allowed unless a union on the path has a non-trivial default
/// constructor.
bool CheckActive(InterpState &S, CodePtr OpPC, const Pointer &Ptr,
                 AccessKinds AK, bool WillActivate) {
  if (Ptr.isActive())
    return true;

  assert(Ptr.inUnion());

  // Find the outermost union.
  Pointer U = Ptr.getBase();
  Pointer C = Ptr;
  while (!U.isRoot() && !U.isActive()) {
    // A little arbitrary, but this is what the current interpreter does.
    // See the AnonymousUnion test in test/AST/ByteCode/unions.cpp.
    // GCC's output is more similar to what we would get without
    // this condition.
    if (U.getRecord() && U.getRecord()->isAnonymousUnion())
      break;

    C = U;
    U = U.getBase();
  }
  assert(C.isField());
  assert(C.getBase() == U);

  // Consider:
  //   union U {
  //     struct {
  //       int x;
  //       int y;
  //     } a;
  //   }
  //
  // When activating x, we will also activate a. If we now try to read
  // from y, we will get to CheckActive, because y is not active. In that
  // case, our U will be a (not a union). We return here and let later code
  // handle this.
  if (!U.getFieldDesc()->isUnion())
    return true;

  // When we will activate Ptr, check that none of the unions in its path have
  // a non-trivial default constructor.
  if (WillActivate) {
    bool Fails = false;
    Pointer It = Ptr;
    while (!It.isRoot() && !It.isActive()) {
      if (const Record *R = It.getRecord(); R && R->isUnion()) {
        if (const auto *CXXRD = dyn_cast<CXXRecordDecl>(Val: R->getDecl());
            CXXRD && !CXXRD->hasTrivialDefaultConstructor()) {
          Fails = true;
          break;
        }
      }
      It = It.getBase();
    }
    // Activation is fine if no union on the path fails the check above.
    if (!Fails)
      return true;
  }

  // Get the inactive field descriptor.
  assert(!C.isActive());
  const FieldDecl *InactiveField = C.getField();
  assert(InactiveField);

  // Find the active field of the union.
  const Record *R = U.getRecord();
  assert(R && R->isUnion() && "Not a union");

  const FieldDecl *ActiveField = nullptr;
  for (const Record::Field &F : R->fields()) {
    const Pointer &Field = U.atField(Off: F.Offset);
    if (Field.isActive()) {
      ActiveField = Field.getField();
      break;
    }
  }

  const SourceInfo &Loc = S.Current->getSource(PC: OpPC);
  S.FFDiag(SI: Loc, DiagId: diag::note_constexpr_access_inactive_union_member)
      << AK << InactiveField << !ActiveField << ActiveField;
  return false;
}
414
/// Check access to an extern object. Reads from externs generally fail
/// since their value is unknown, with exceptions for initialized objects
/// and the declaration currently being evaluated.
bool CheckExtern(InterpState &S, CodePtr OpPC, const Pointer &Ptr) {
  if (!Ptr.isExtern())
    return true;

  // The access is fine if the object is initialized, or if it is the very
  // declaration whose initializer we are currently evaluating.
  if (!Ptr.isPastEnd() &&
      (Ptr.isInitialized() ||
       (Ptr.getDeclDesc()->asVarDecl() == S.EvaluatingDecl)))
    return true;

  // Fail silently (no note) for const externs while merely checking for a
  // potential constant expression in C++.
  if (S.checkingPotentialConstantExpression() && S.getLangOpts().CPlusPlus &&
      Ptr.isConst())
    return false;

  const auto *VD = Ptr.getDeclDesc()->asValueDecl();
  diagnoseNonConstVariable(S, OpPC, VD);
  return false;
}
432
433bool CheckArray(InterpState &S, CodePtr OpPC, const Pointer &Ptr) {
434 if (!Ptr.isUnknownSizeArray())
435 return true;
436 const SourceInfo &E = S.Current->getSource(PC: OpPC);
437 S.FFDiag(SI: E, DiagId: diag::note_constexpr_unsized_array_indexed);
438 return false;
439}
440
/// Check that the object \p Ptr points into may be accessed at all:
/// the pointer is non-null and the pointee is still alive (not deleted,
/// not a block whose lifetime has ended).
bool CheckLive(InterpState &S, CodePtr OpPC, const Pointer &Ptr,
               AccessKinds AK) {
  if (Ptr.isZero()) {
    const auto &Src = S.Current->getSource(PC: OpPC);

    // Distinguish "member of a null pointer" from a plain null access.
    if (Ptr.isField())
      S.FFDiag(SI: Src, DiagId: diag::note_constexpr_null_subobject) << CSK_Field;
    else
      S.FFDiag(SI: Src, DiagId: diag::note_constexpr_access_null) << AK;

    return false;
  }

  if (!Ptr.isLive()) {
    const auto &Src = S.Current->getSource(PC: OpPC);

    if (Ptr.isDynamic()) {
      S.FFDiag(SI: Src, DiagId: diag::note_constexpr_access_deleted_object) << AK;
    } else if (!S.checkingPotentialConstantExpression()) {
      bool IsTemp = Ptr.isTemporary();
      S.FFDiag(SI: Src, DiagId: diag::note_constexpr_access_uninit)
          << AK << /*uninitialized=*/false << S.Current->getRange(PC: OpPC);

      // Point at the temporary, or the declaration, that the dead object
      // came from.
      if (IsTemp)
        S.Note(Loc: Ptr.getDeclLoc(), DiagId: diag::note_constexpr_temporary_here);
      else
        S.Note(Loc: Ptr.getDeclLoc(), DiagId: diag::note_declared_at);
    }

    return false;
  }

  return true;
}
475
/// Check that the variable described by \p Desc may be read in a constant
/// expression. Constexpr variables and the declaration currently being
/// evaluated are always fine; otherwise the result depends on the
/// variable's type, const-ness and the language mode.
bool CheckConstant(InterpState &S, CodePtr OpPC, const Descriptor *Desc) {
  assert(Desc);

  const auto *D = Desc->asVarDecl();
  if (!D || D == S.EvaluatingDecl || D->isConstexpr())
    return true;

  // If we're evaluating the initializer for a constexpr variable in C23, we
  // may only read other constexpr variables. Abort here since this one isn't
  // constexpr.
  if (const auto *VD = dyn_cast_if_present<VarDecl>(Val: S.EvaluatingDecl);
      VD && VD->isConstexpr() && S.getLangOpts().C23)
    return Invalid(S, OpPC);

  QualType T = D->getType();
  bool IsConstant = T.isConstant(Ctx: S.getASTContext());
  // Integral/enum variables must be constant to be readable.
  if (T->isIntegralOrEnumerationType()) {
    if (!IsConstant) {
      diagnoseNonConstVariable(S, OpPC, VD: D);
      return false;
    }
    return true;
  }

  // Constant but non-integral: allowed, but emit a CCE note to match the
  // current interpreter.
  if (IsConstant) {
    if (S.getLangOpts().CPlusPlus) {
      S.CCEDiag(Loc: S.Current->getLocation(PC: OpPC),
                DiagId: S.getLangOpts().CPlusPlus11
                    ? diag::note_constexpr_ltor_non_constexpr
                    : diag::note_constexpr_ltor_non_integral,
                ExtraNotes: 1)
          << D << T;
      S.Note(Loc: D->getLocation(), DiagId: diag::note_declared_at);
    } else {
      S.CCEDiag(Loc: S.Current->getLocation(PC: OpPC));
    }
    return true;
  }

  // Pointers/references are fine in C++11+ if the pointee is constant.
  if (T->isPointerOrReferenceType()) {
    if (!T->getPointeeType().isConstant(Ctx: S.getASTContext()) ||
        !S.getLangOpts().CPlusPlus11) {
      diagnoseNonConstVariable(S, OpPC, VD: D);
      return false;
    }
    return true;
  }

  diagnoseNonConstVariable(S, OpPC, VD: D);
  return false;
}
527
528static bool CheckConstant(InterpState &S, CodePtr OpPC, const Pointer &Ptr) {
529 if (!Ptr.isStatic() || !Ptr.isBlockPointer())
530 return true;
531 if (!Ptr.getDeclID())
532 return true;
533 return CheckConstant(S, OpPC, Desc: Ptr.getDeclDesc());
534}
535
536bool CheckNull(InterpState &S, CodePtr OpPC, const Pointer &Ptr,
537 CheckSubobjectKind CSK) {
538 if (!Ptr.isZero())
539 return true;
540 const SourceInfo &Loc = S.Current->getSource(PC: OpPC);
541 S.FFDiag(SI: Loc, DiagId: diag::note_constexpr_null_subobject)
542 << CSK << S.Current->getRange(PC: OpPC);
543
544 return false;
545}
546
547bool CheckRange(InterpState &S, CodePtr OpPC, const Pointer &Ptr,
548 AccessKinds AK) {
549 if (!Ptr.isOnePastEnd() && !Ptr.isZeroSizeArray())
550 return true;
551 if (S.getLangOpts().CPlusPlus) {
552 const SourceInfo &Loc = S.Current->getSource(PC: OpPC);
553 S.FFDiag(SI: Loc, DiagId: diag::note_constexpr_access_past_end)
554 << AK << S.Current->getRange(PC: OpPC);
555 }
556 return false;
557}
558
559bool CheckRange(InterpState &S, CodePtr OpPC, const Pointer &Ptr,
560 CheckSubobjectKind CSK) {
561 if (!Ptr.isElementPastEnd() && !Ptr.isZeroSizeArray())
562 return true;
563 const SourceInfo &Loc = S.Current->getSource(PC: OpPC);
564 S.FFDiag(SI: Loc, DiagId: diag::note_constexpr_past_end_subobject)
565 << CSK << S.Current->getRange(PC: OpPC);
566 return false;
567}
568
569bool CheckSubobject(InterpState &S, CodePtr OpPC, const Pointer &Ptr,
570 CheckSubobjectKind CSK) {
571 if (!Ptr.isOnePastEnd())
572 return true;
573
574 const SourceInfo &Loc = S.Current->getSource(PC: OpPC);
575 S.FFDiag(SI: Loc, DiagId: diag::note_constexpr_past_end_subobject)
576 << CSK << S.Current->getRange(PC: OpPC);
577 return false;
578}
579
/// Check that a downcast by \p Offset bytes stays inside the object: the
/// resulting byte offset must not go below the block's metadata region.
/// On failure, emit the "invalid downcast" note naming the most-derived
/// and the target type.
bool CheckDowncast(InterpState &S, CodePtr OpPC, const Pointer &Ptr,
                   uint32_t Offset) {
  uint32_t MinOffset = Ptr.getDeclDesc()->getMetadataSize();
  uint32_t PtrOffset = Ptr.getByteOffset();

  // We subtract Offset from PtrOffset. The result must be at least
  // MinOffset.
  if (Offset < PtrOffset && (PtrOffset - Offset) >= MinOffset)
    return true;

  const auto *E = cast<CastExpr>(Val: S.Current->getExpr(PC: OpPC));
  QualType TargetQT = E->getType()->getPointeeType();
  QualType MostDerivedQT = Ptr.getDeclPtr().getType();

  S.CCEDiag(E, DiagId: diag::note_constexpr_invalid_downcast)
      << MostDerivedQT << TargetQT;

  return false;
}
599
/// Check that the object \p Ptr points to is writable, i.e. not const.
bool CheckConst(InterpState &S, CodePtr OpPC, const Pointer &Ptr) {
  assert(Ptr.isLive() && "Pointer is not live");
  if (!Ptr.isConst())
    return true;

  // NOTE(review): mutable subobjects are exempt unless the accessed member
  // itself is a const member inside the mutable one — confirm against
  // Pointer::isConstInMutable().
  if (Ptr.isMutable() && !Ptr.isConstInMutable())
    return true;

  // Non-block pointers fail silently.
  if (!Ptr.isBlockPointer())
    return false;

  // The This pointer is writable in constructors and destructors,
  // even if isConst() returns true.
  if (llvm::is_contained(Range&: S.InitializingBlocks, Element: Ptr.block()))
    return true;

  const QualType Ty = Ptr.getType();
  const SourceInfo &Loc = S.Current->getSource(PC: OpPC);
  S.FFDiag(SI: Loc, DiagId: diag::note_constexpr_modify_const_type) << Ty;
  return false;
}
621
/// Check whether reading the mutable field \p Ptr points to is allowed.
bool CheckMutable(InterpState &S, CodePtr OpPC, const Pointer &Ptr) {
  assert(Ptr.isLive() && "Pointer is not live");
  if (!Ptr.isMutable())
    return true;

  // In C++14 onwards, it is permitted to read a mutable member whose
  // lifetime began within the evaluation.
  if (S.getLangOpts().CPlusPlus14 &&
      Ptr.block()->getEvalID() == S.Ctx.getEvalID()) {
    // FIXME: This check is necessary because (of the way) we revisit
    // variables in Compiler.cpp:visitDeclRef. Revisiting a so far
    // unknown variable will get the same EvalID and we end up allowing
    // reads from mutable members of it.
    if (!S.inConstantContext() && isConstexprUnknown(P: Ptr))
      return false;
    return true;
  }

  const SourceInfo &Loc = S.Current->getSource(PC: OpPC);
  const FieldDecl *Field = Ptr.getField();
  S.FFDiag(SI: Loc, DiagId: diag::note_constexpr_access_mutable, ExtraNotes: 1) << AK_Read << Field;
  S.Note(Loc: Field->getLocation(), DiagId: diag::note_declared_at);
  return false;
}
646
/// Check for accesses to volatile objects, which are not allowed in C++
/// constant expressions unless the object is currently being constructed.
/// Walks up the pointer hierarchy to find the subobject that introduced the
/// volatile qualifier so the diagnostic can name it.
static bool CheckVolatile(InterpState &S, CodePtr OpPC, const Pointer &Ptr,
                          AccessKinds AK) {
  assert(Ptr.isLive());

  if (!Ptr.isVolatile())
    return true;

  // In C, there is no dedicated diagnostic; fail generically.
  if (!S.getLangOpts().CPlusPlus)
    return Invalid(S, OpPC);

  // Volatile objects can be written-to and read if they are being constructed.
  if (llvm::is_contained(Range&: S.InitializingBlocks, Element: Ptr.block()))
    return true;

  // The reason why Ptr is volatile might be further up the hierarchy.
  // Find that pointer.
  Pointer P = Ptr;
  while (!P.isRoot()) {
    if (P.getType().isVolatileQualified())
      break;
    P = P.getBase();
  }

  // DiagKind feeds a %select in the diagnostic: 2 when the volatile
  // subobject is a field, 1 when it is a variable, 0 otherwise (e.g. an
  // expression-backed object).
  const NamedDecl *ND = nullptr;
  int DiagKind;
  SourceLocation Loc;
  if (const auto *F = P.getField()) {
    DiagKind = 2;
    Loc = F->getLocation();
    ND = F;
  } else if (auto *VD = P.getFieldDesc()->asValueDecl()) {
    DiagKind = 1;
    Loc = VD->getLocation();
    ND = VD;
  } else {
    DiagKind = 0;
    if (const auto *E = P.getFieldDesc()->asExpr())
      Loc = E->getExprLoc();
  }

  S.FFDiag(Loc: S.Current->getLocation(PC: OpPC),
           DiagId: diag::note_constexpr_access_volatile_obj, ExtraNotes: 1)
      << AK << DiagKind << ND;
  S.Note(Loc, DiagId: diag::note_constexpr_volatile_here) << DiagKind;
  return false;
}
693
694bool DiagnoseUninitialized(InterpState &S, CodePtr OpPC, const Pointer &Ptr,
695 AccessKinds AK) {
696 assert(Ptr.isLive());
697 assert(!Ptr.isInitialized());
698 return DiagnoseUninitialized(S, OpPC, Extern: Ptr.isExtern(), Desc: Ptr.getDeclDesc(), AK);
699}
700
/// Emit the diagnostic for a read of an uninitialized object described by
/// \p Desc. Always returns false; the note emitted depends on whether the
/// object is a global/constexpr variable, the declaration currently being
/// evaluated, or a plain uninitialized object.
bool DiagnoseUninitialized(InterpState &S, CodePtr OpPC, bool Extern,
                           const Descriptor *Desc, AccessKinds AK) {
  // Stay silent for externs while merely checking for a potential constant
  // expression.
  if (Extern && S.checkingPotentialConstantExpression())
    return false;

  if (const auto *VD = Desc->asVarDecl();
      VD && (VD->isConstexpr() || VD->hasGlobalStorage())) {

    // Reading the variable whose initializer we are currently evaluating.
    if (VD == S.EvaluatingDecl &&
        !(S.getLangOpts().CPlusPlus23 && VD->getType()->isReferenceType())) {
      if (!S.getLangOpts().CPlusPlus14 &&
          !VD->getType().isConstant(Ctx: S.getASTContext())) {
        // Diagnose as non-const read.
        diagnoseNonConstVariable(S, OpPC, VD);
      } else {
        const SourceInfo &Loc = S.Current->getSource(PC: OpPC);
        // Diagnose as "read of object outside its lifetime".
        S.FFDiag(SI: Loc, DiagId: diag::note_constexpr_access_uninit)
            << AK << /*IsIndeterminate=*/false;
      }
      return false;
    }

    // Global/constexpr variables: distinguish "non-constant initializer"
    // from "no initializer at all".
    if (VD->getAnyInitializer()) {
      const SourceInfo &Loc = S.Current->getSource(PC: OpPC);
      S.FFDiag(SI: Loc, DiagId: diag::note_constexpr_var_init_non_constant, ExtraNotes: 1) << VD;
      S.Note(Loc: VD->getLocation(), DiagId: diag::note_declared_at);
    } else {
      diagnoseMissingInitializer(S, OpPC, VD);
    }
    return false;
  }

  if (!S.checkingPotentialConstantExpression()) {
    S.FFDiag(SI: S.Current->getSource(PC: OpPC), DiagId: diag::note_constexpr_access_uninit)
        << AK << /*uninitialized=*/true << S.Current->getRange(PC: OpPC);
  }
  return false;
}
740
741static bool CheckLifetime(InterpState &S, CodePtr OpPC, Lifetime LT,
742 AccessKinds AK) {
743 if (LT == Lifetime::Started)
744 return true;
745
746 if (!S.checkingPotentialConstantExpression()) {
747 S.FFDiag(SI: S.Current->getSource(PC: OpPC), DiagId: diag::note_constexpr_access_uninit)
748 << AK << /*uninitialized=*/false << S.Current->getRange(PC: OpPC);
749 }
750 return false;
751}
752
753static bool CheckWeak(InterpState &S, CodePtr OpPC, const Block *B) {
754 if (!B->isWeak())
755 return true;
756
757 const auto *VD = B->getDescriptor()->asVarDecl();
758 assert(VD);
759 S.FFDiag(Loc: S.Current->getLocation(PC: OpPC), DiagId: diag::note_constexpr_var_init_weak)
760 << VD;
761 S.Note(Loc: VD->getLocation(), DiagId: diag::note_declared_at);
762
763 return false;
764}
765
// The list of checks here is just the one from CheckLoad, but with the
// ones removed that are impossible on primitive global values.
// For example, since those can't be members of structs, they also can't
// be mutable.
bool CheckGlobalLoad(InterpState &S, CodePtr OpPC, const Block *B) {
  const auto &Desc = B->getBlockDesc<GlobalInlineDescriptor>();
  // Blocks that are not directly accessible are vetted only by the
  // extern/dummy/weak checks.
  if (!B->isAccessible()) {
    if (!CheckExtern(S, OpPC, Ptr: Pointer(const_cast<Block *>(B))))
      return false;
    if (!CheckDummy(S, OpPC, B, AK: AK_Read))
      return false;
    return CheckWeak(S, OpPC, B);
  }

  if (!CheckConstant(S, OpPC, Desc: B->getDescriptor()))
    return false;
  if (Desc.InitState != GlobalInitState::Initialized)
    return DiagnoseUninitialized(S, OpPC, Extern: B->isExtern(), Desc: B->getDescriptor(),
                                 AK: AK_Read);
  if (!CheckTemporary(S, OpPC, B, AK: AK_Read))
    return false;
  // Volatile globals may not be read in constant expressions.
  if (B->getDescriptor()->IsVolatile) {
    if (!S.getLangOpts().CPlusPlus)
      return Invalid(S, OpPC);

    const ValueDecl *D = B->getDescriptor()->asValueDecl();
    S.FFDiag(Loc: S.Current->getLocation(PC: OpPC),
             DiagId: diag::note_constexpr_access_volatile_obj, ExtraNotes: 1)
        << AK_Read << 1 << D;
    S.Note(Loc: D->getLocation(), DiagId: diag::note_constexpr_volatile_here) << 1;
    return false;
  }
  return true;
}
800
// Similarly, for local loads: the reduced check list from CheckLoad that
// applies to primitive local variables.
bool CheckLocalLoad(InterpState &S, CodePtr OpPC, const Block *B) {
  assert(!B->isExtern());
  const auto &Desc = *reinterpret_cast<const InlineDescriptor *>(B->rawData());
  if (!CheckLifetime(S, OpPC, LT: Desc.LifeState, AK: AK_Read))
    return false;
  if (!Desc.IsInitialized)
    return DiagnoseUninitialized(S, OpPC, /*Extern=*/false, Desc: B->getDescriptor(),
                                 AK: AK_Read);
  // Volatile locals may not be read in constant expressions.
  if (B->getDescriptor()->IsVolatile) {
    if (!S.getLangOpts().CPlusPlus)
      return Invalid(S, OpPC);

    const ValueDecl *D = B->getDescriptor()->asValueDecl();
    S.FFDiag(Loc: S.Current->getLocation(PC: OpPC),
             DiagId: diag::note_constexpr_access_volatile_obj, ExtraNotes: 1)
        << AK_Read << 1 << D;
    S.Note(Loc: D->getLocation(), DiagId: diag::note_constexpr_volatile_here) << 1;
    return false;
  }
  return true;
}
823
/// Full set of checks for reading the value \p Ptr points to.
/// Returns true if the load may proceed; otherwise a diagnostic has been
/// emitted (or the failure is intentionally silent).
bool CheckLoad(InterpState &S, CodePtr OpPC, const Pointer &Ptr,
               AccessKinds AK) {
  if (Ptr.isZero()) {
    const auto &Src = S.Current->getSource(PC: OpPC);

    // Distinguish "member of a null pointer" from a plain null access.
    if (Ptr.isField())
      S.FFDiag(SI: Src, DiagId: diag::note_constexpr_null_subobject) << CSK_Field;
    else
      S.FFDiag(SI: Src, DiagId: diag::note_constexpr_access_null) << AK;
    return false;
  }
  // Block pointers are the only ones we can actually read from.
  if (!Ptr.isBlockPointer())
    return false;

  // Blocks that are not directly accessible are vetted only by the
  // live/extern/dummy/weak checks.
  if (!Ptr.block()->isAccessible()) {
    if (!CheckLive(S, OpPC, Ptr, AK))
      return false;
    if (!CheckExtern(S, OpPC, Ptr))
      return false;
    if (!CheckDummy(S, OpPC, B: Ptr.block(), AK))
      return false;
    return CheckWeak(S, OpPC, B: Ptr.block());
  }

  if (!CheckConstant(S, OpPC, Ptr))
    return false;
  if (!CheckRange(S, OpPC, Ptr, AK))
    return false;
  if (!CheckActive(S, OpPC, Ptr, AK))
    return false;
  if (!CheckLifetime(S, OpPC, LT: Ptr.getLifetime(), AK))
    return false;
  if (!Ptr.isInitialized())
    return DiagnoseUninitialized(S, OpPC, Ptr, AK);
  if (!CheckTemporary(S, OpPC, B: Ptr.block(), AK))
    return false;

  if (!CheckMutable(S, OpPC, Ptr))
    return false;
  if (!CheckVolatile(S, OpPC, Ptr, AK))
    return false;
  // Reads of constexpr-unknown values fail silently outside constant
  // contexts.
  if (!Ptr.isConst() && !S.inConstantContext() && isConstexprUnknown(P: Ptr))
    return false;
  return true;
}
870
/// This is not used by any of the opcodes directly. It's used by
/// EvalEmitter to do the final lvalue-to-rvalue conversion.
/// It mirrors CheckLoad, minus the checks that do not apply at this point
/// (range, mutability/volatility exceptions, constexpr-unknown).
bool CheckFinalLoad(InterpState &S, CodePtr OpPC, const Pointer &Ptr) {
  assert(!Ptr.isZero());
  if (!Ptr.isBlockPointer())
    return false;

  if (!Ptr.block()->isAccessible()) {
    if (!CheckLive(S, OpPC, Ptr, AK: AK_Read))
      return false;
    if (!CheckExtern(S, OpPC, Ptr))
      return false;
    if (!CheckDummy(S, OpPC, B: Ptr.block(), AK: AK_Read))
      return false;
    return CheckWeak(S, OpPC, B: Ptr.block());
  }

  if (!CheckConstant(S, OpPC, Ptr))
    return false;

  if (!CheckActive(S, OpPC, Ptr, AK: AK_Read))
    return false;
  if (!CheckLifetime(S, OpPC, LT: Ptr.getLifetime(), AK: AK_Read))
    return false;
  if (!Ptr.isInitialized())
    return DiagnoseUninitialized(S, OpPC, Ptr, AK: AK_Read);
  if (!CheckTemporary(S, OpPC, B: Ptr.block(), AK: AK_Read))
    return false;
  if (!CheckMutable(S, OpPC, Ptr))
    return false;
  return true;
}
903
/// Full set of checks for assigning through \p Ptr: lifetime, bounds,
/// union activation (\p WillBeActivated), global modification, const-ness
/// and volatility.
bool CheckStore(InterpState &S, CodePtr OpPC, const Pointer &Ptr,
                bool WillBeActivated) {
  // Only non-null block pointers can be stored through.
  if (!Ptr.isBlockPointer() || Ptr.isZero())
    return false;

  if (!Ptr.block()->isAccessible()) {
    if (!CheckLive(S, OpPC, Ptr, AK: AK_Assign))
      return false;
    if (!CheckExtern(S, OpPC, Ptr))
      return false;
    return CheckDummy(S, OpPC, B: Ptr.block(), AK: AK_Assign);
  }
  if (!CheckLifetime(S, OpPC, LT: Ptr.getLifetime(), AK: AK_Assign))
    return false;
  if (!CheckRange(S, OpPC, Ptr, AK: AK_Assign))
    return false;
  if (!CheckActive(S, OpPC, Ptr, AK: AK_Assign, WillActivate: WillBeActivated))
    return false;
  if (!CheckGlobal(S, OpPC, Ptr))
    return false;
  if (!CheckConst(S, OpPC, Ptr))
    return false;
  if (!CheckVolatile(S, OpPC, Ptr, AK: AK_Assign))
    return false;
  // Stores to constexpr-unknown values fail silently outside constant
  // contexts.
  if (!S.inConstantContext() && isConstexprUnknown(P: Ptr))
    return false;
  return true;
}
932
933static bool CheckInvoke(InterpState &S, CodePtr OpPC, const Pointer &Ptr) {
934 if (!CheckLive(S, OpPC, Ptr, AK: AK_MemberCall))
935 return false;
936 if (!Ptr.isDummy()) {
937 if (!CheckExtern(S, OpPC, Ptr))
938 return false;
939 if (!CheckRange(S, OpPC, Ptr, AK: AK_MemberCall))
940 return false;
941 }
942 return true;
943}
944
945bool CheckInit(InterpState &S, CodePtr OpPC, const Pointer &Ptr) {
946 if (!CheckLive(S, OpPC, Ptr, AK: AK_Assign))
947 return false;
948 if (!CheckRange(S, OpPC, Ptr, AK: AK_Assign))
949 return false;
950 return true;
951}
952
953static bool diagnoseCallableDecl(InterpState &S, CodePtr OpPC,
954 const FunctionDecl *DiagDecl) {
955 // Bail out if the function declaration itself is invalid. We will
956 // have produced a relevant diagnostic while parsing it, so just
957 // note the problematic sub-expression.
958 if (DiagDecl->isInvalidDecl())
959 return Invalid(S, OpPC);
960
961 // Diagnose failed assertions specially.
962 if (S.Current->getLocation(PC: OpPC).isMacroID() && DiagDecl->getIdentifier()) {
963 // FIXME: Instead of checking for an implementation-defined function,
964 // check and evaluate the assert() macro.
965 StringRef Name = DiagDecl->getName();
966 bool AssertFailed =
967 Name == "__assert_rtn" || Name == "__assert_fail" || Name == "_wassert";
968 if (AssertFailed) {
969 S.FFDiag(Loc: S.Current->getLocation(PC: OpPC),
970 DiagId: diag::note_constexpr_assert_failed);
971 return false;
972 }
973 }
974
975 if (!S.getLangOpts().CPlusPlus11) {
976 S.FFDiag(Loc: S.Current->getLocation(PC: OpPC),
977 DiagId: diag::note_invalid_subexpr_in_const_expr);
978 return false;
979 }
980
981 // Invalid decls have been diagnosed before.
982 if (DiagDecl->isInvalidDecl())
983 return false;
984
985 // If this function is not constexpr because it is an inherited
986 // non-constexpr constructor, diagnose that directly.
987 const auto *CD = dyn_cast<CXXConstructorDecl>(Val: DiagDecl);
988 if (CD && CD->isInheritingConstructor()) {
989 const auto *Inherited = CD->getInheritedConstructor().getConstructor();
990 if (!Inherited->isConstexpr())
991 DiagDecl = CD = Inherited;
992 }
993
994 // Silently reject constructors of invalid classes. The invalid class
995 // has been rejected elsewhere before.
996 if (CD && CD->getParent()->isInvalidDecl())
997 return false;
998
999 // FIXME: If DiagDecl is an implicitly-declared special member function
1000 // or an inheriting constructor, we should be much more explicit about why
1001 // it's not constexpr.
1002 if (CD && CD->isInheritingConstructor()) {
1003 S.FFDiag(Loc: S.Current->getLocation(PC: OpPC), DiagId: diag::note_constexpr_invalid_inhctor,
1004 ExtraNotes: 1)
1005 << CD->getInheritedConstructor().getConstructor()->getParent();
1006 S.Note(Loc: DiagDecl->getLocation(), DiagId: diag::note_declared_at);
1007 } else {
1008 // Don't emit anything if the function isn't defined and we're checking
1009 // for a constant expression. It might be defined at the point we're
1010 // actually calling it.
1011 bool IsExtern = DiagDecl->getStorageClass() == SC_Extern;
1012 bool IsDefined = DiagDecl->isDefined();
1013 if (!IsDefined && !IsExtern && DiagDecl->isConstexpr() &&
1014 S.checkingPotentialConstantExpression())
1015 return false;
1016
1017 // If the declaration is defined, declared 'constexpr' _and_ has a body,
1018 // the below diagnostic doesn't add anything useful.
1019 if (DiagDecl->isDefined() && DiagDecl->isConstexpr() && DiagDecl->hasBody())
1020 return false;
1021
1022 S.FFDiag(Loc: S.Current->getLocation(PC: OpPC),
1023 DiagId: diag::note_constexpr_invalid_function, ExtraNotes: 1)
1024 << DiagDecl->isConstexpr() << (bool)CD << DiagDecl;
1025
1026 if (DiagDecl->getDefinition())
1027 S.Note(Loc: DiagDecl->getDefinition()->getLocation(), DiagId: diag::note_declared_at);
1028 else
1029 S.Note(Loc: DiagDecl->getLocation(), DiagId: diag::note_declared_at);
1030 }
1031
1032 return false;
1033}
1034
/// Check whether the function \p F may be called in the current evaluation
/// mode. Returns true if the call may proceed; otherwise either fails
/// silently (in potential-constant-expression mode) or diagnoses via
/// diagnoseCallableDecl().
static bool CheckCallable(InterpState &S, CodePtr OpPC, const Function *F) {
  // Virtual calls are only allowed in constant expressions since C++20.
  if (F->isVirtual() && !S.getLangOpts().CPlusPlus20) {
    const SourceLocation &Loc = S.Current->getLocation(PC: OpPC);
    S.CCEDiag(Loc, DiagId: diag::note_constexpr_virtual_call);
    return false;
  }

  // When only checking for a potential constant expression, don't descend
  // into nested calls; checking the top-level body is sufficient.
  if (S.checkingPotentialConstantExpression() && S.Current->getDepth() != 0)
    return false;

  // A valid function with a body that is constexpr (or carries
  // [[msvc::constexpr]] where that is honored) is always callable.
  if (F->isValid() && F->hasBody() &&
      (F->isConstexpr() || (S.Current->MSVCConstexprAllowed &&
                            F->getDecl()->hasAttr<MSConstexprAttr>())))
    return true;

  const FunctionDecl *DiagDecl = F->getDecl();
  const FunctionDecl *Definition = nullptr;
  DiagDecl->getBody(Definition);

  // A not-yet-defined constexpr function might still be defined before the
  // point of the actual call, so fail without a diagnostic here.
  if (!Definition && S.checkingPotentialConstantExpression() &&
      DiagDecl->isConstexpr()) {
    return false;
  }

  // Implicitly constexpr.
  if (F->isLambdaStaticInvoker())
    return true;

  return diagnoseCallableDecl(S, OpPC, DiagDecl);
}
1065
1066static bool CheckCallDepth(InterpState &S, CodePtr OpPC) {
1067 if ((S.Current->getDepth() + 1) > S.getLangOpts().ConstexprCallDepth) {
1068 S.FFDiag(SI: S.Current->getSource(PC: OpPC),
1069 DiagId: diag::note_constexpr_depth_limit_exceeded)
1070 << S.getLangOpts().ConstexprCallDepth;
1071 return false;
1072 }
1073
1074 return true;
1075}
1076
1077bool CheckThis(InterpState &S, CodePtr OpPC) {
1078 if (S.Current->hasThisPointer())
1079 return true;
1080
1081 const Expr *E = S.Current->getExpr(PC: OpPC);
1082 if (S.getLangOpts().CPlusPlus11) {
1083 bool IsImplicit = false;
1084 if (const auto *TE = dyn_cast<CXXThisExpr>(Val: E))
1085 IsImplicit = TE->isImplicit();
1086 S.FFDiag(E, DiagId: diag::note_constexpr_this) << IsImplicit;
1087 } else {
1088 S.FFDiag(E);
1089 }
1090
1091 return false;
1092}
1093
/// Check the status of a floating-point operation for conditions that make
/// the result unusable in a constant expression: NaN results, and — outside
/// of a constant context — dependence on dynamic rounding modes or
/// floating-point exception state.
bool CheckFloatResult(InterpState &S, CodePtr OpPC, const Floating &Result,
                      APFloat::opStatus Status, FPOptions FPO) {
  // [expr.pre]p4:
  //   If during the evaluation of an expression, the result is not
  //   mathematically defined [...], the behavior is undefined.
  // FIXME: C++ rules require us to not conform to IEEE 754 here.
  if (Result.isNan()) {
    const SourceInfo &E = S.Current->getSource(PC: OpPC);
    S.CCEDiag(SI: E, DiagId: diag::note_constexpr_float_arithmetic)
        << /*NaN=*/true << S.Current->getRange(PC: OpPC);
    // This is UB, not an outright failure; let noteUndefinedBehavior decide.
    return S.noteUndefinedBehavior();
  }

  // In a constant context, assume that any dynamic rounding mode or FP
  // exception state matches the default floating-point environment.
  if (S.inConstantContext())
    return true;

  if ((Status & APFloat::opInexact) &&
      FPO.getRoundingMode() == llvm::RoundingMode::Dynamic) {
    // Inexact result means that it depends on rounding mode. If the requested
    // mode is dynamic, the evaluation cannot be made in compile time.
    const SourceInfo &E = S.Current->getSource(PC: OpPC);
    S.FFDiag(SI: E, DiagId: diag::note_constexpr_dynamic_rounding);
    return false;
  }

  // Any non-OK status is unacceptable under strict FP semantics (dynamic
  // rounding, non-ignored FP exceptions, or FP-environment access).
  if ((Status != APFloat::opOK) &&
      (FPO.getRoundingMode() == llvm::RoundingMode::Dynamic ||
       FPO.getExceptionMode() != LangOptions::FPE_Ignore ||
       FPO.getAllowFEnvAccess())) {
    const SourceInfo &E = S.Current->getSource(PC: OpPC);
    S.FFDiag(SI: E, DiagId: diag::note_constexpr_float_arithmetic_strict);
    return false;
  }

  if ((Status & APFloat::opStatus::opInvalidOp) &&
      FPO.getExceptionMode() != LangOptions::FPE_Ignore) {
    const SourceInfo &E = S.Current->getSource(PC: OpPC);
    // There is no usefully definable result.
    S.FFDiag(SI: E);
    return false;
  }

  return true;
}
1140
1141bool CheckDynamicMemoryAllocation(InterpState &S, CodePtr OpPC) {
1142 if (S.getLangOpts().CPlusPlus20)
1143 return true;
1144
1145 const SourceInfo &E = S.Current->getSource(PC: OpPC);
1146 S.CCEDiag(SI: E, DiagId: diag::note_constexpr_new);
1147 return true;
1148}
1149
/// Check that the allocation form (new vs. new[] vs. operator new) matches
/// the deallocation form; diagnose a mismatch, pointing at both the delete
/// site and the originating new-expression.
bool CheckNewDeleteForms(InterpState &S, CodePtr OpPC,
                         DynamicAllocator::Form AllocForm,
                         DynamicAllocator::Form DeleteForm, const Descriptor *D,
                         const Expr *NewExpr) {
  if (AllocForm == DeleteForm)
    return true;

  QualType TypeToDiagnose = D->getDataType(Ctx: S.getASTContext());

  const SourceInfo &E = S.Current->getSource(PC: OpPC);
  S.FFDiag(SI: E, DiagId: diag::note_constexpr_new_delete_mismatch)
      << static_cast<int>(DeleteForm) << static_cast<int>(AllocForm)
      << TypeToDiagnose;
  // Point at where the allocation happened.
  S.Note(Loc: NewExpr->getExprLoc(), DiagId: diag::note_constexpr_dynamic_alloc_here)
      << NewExpr->getSourceRange();
  return false;
}
1167
/// Check that the pointer being deleted actually originates from a dynamic
/// allocation (a new-expression, __builtin_operator_new, or a
/// std::allocator::allocate() call); otherwise diagnose the delete of a
/// non-heap object.
bool CheckDeleteSource(InterpState &S, CodePtr OpPC, const Expr *Source,
                       const Pointer &Ptr) {
  // Regular new type(...) call.
  if (isa_and_nonnull<CXXNewExpr>(Val: Source))
    return true;
  // operator new.
  if (const auto *CE = dyn_cast_if_present<CallExpr>(Val: Source);
      CE && CE->getBuiltinCallee() == Builtin::BI__builtin_operator_new)
    return true;
  // std::allocator.allocate() call
  if (const auto *MCE = dyn_cast_if_present<CXXMemberCallExpr>(Val: Source);
      MCE && MCE->getMethodDecl()->getIdentifier()->isStr(Str: "allocate"))
    return true;

  // Whatever this is, we didn't heap allocate it.
  const SourceInfo &Loc = S.Current->getSource(PC: OpPC);
  S.FFDiag(SI: Loc, DiagId: diag::note_constexpr_delete_not_heap_alloc)
      << Ptr.toDiagnosticString(Ctx: S.getASTContext());

  // Point at where the (non-heap) object was created.
  if (Ptr.isTemporary())
    S.Note(Loc: Ptr.getDeclLoc(), DiagId: diag::note_constexpr_temporary_here);
  else
    S.Note(Loc: Ptr.getDeclLoc(), DiagId: diag::note_declared_at);
  return false;
}
1193
/// We already know the given DeclRefExpr is invalid for some reason,
/// now figure out why and print appropriate diagnostics.
bool CheckDeclRef(InterpState &S, CodePtr OpPC, const DeclRefExpr *DR) {
  const ValueDecl *D = DR->getDecl();
  return diagnoseUnknownDecl(S, OpPC, D);
}
1200
/// Opcode for a DeclRefExpr we already know to be invalid. If the variable's
/// initializer failed to evaluate, diagnose that specifically; otherwise
/// defer to CheckDeclRef() to work out the reason.
bool InvalidDeclRef(InterpState &S, CodePtr OpPC, const DeclRefExpr *DR,
                    bool InitializerFailed) {
  assert(DR);

  if (InitializerFailed) {
    const SourceInfo &Loc = S.Current->getSource(PC: OpPC);
    const auto *VD = cast<VarDecl>(Val: DR->getDecl());
    S.FFDiag(SI: Loc, DiagId: diag::note_constexpr_var_init_non_constant, ExtraNotes: 1) << VD;
    S.Note(Loc: VD->getLocation(), DiagId: diag::note_declared_at);
    return false;
  }

  return CheckDeclRef(S, OpPC, DR);
}
1215
/// Check an access through a dummy block (a placeholder for a variable whose
/// value is unknown to the interpreter). Non-dummy blocks pass; accesses
/// through dummies are diagnosed according to the access kind and fail.
bool CheckDummy(InterpState &S, CodePtr OpPC, const Block *B, AccessKinds AK) {
  if (!B->isDummy())
    return true;

  const ValueDecl *D = B->getDescriptor()->asValueDecl();
  if (!D)
    return false;

  // Reads (and read-modify-write ops) of an unknown declaration get the
  // detailed "why is this unknown" diagnostic.
  if (AK == AK_Read || AK == AK_Increment || AK == AK_Decrement)
    return diagnoseUnknownDecl(S, OpPC, D);

  // Writes/destruction of such an object amount to modifying a global.
  if (AK == AK_Destroy || S.getLangOpts().CPlusPlus14) {
    const SourceInfo &E = S.Current->getSource(PC: OpPC);
    S.FFDiag(SI: E, DiagId: diag::note_constexpr_modify_global);
  }
  return false;
}
1233
/// Check arguments constrained by the callee's nonnull attributes: any
/// pointer-typed argument marked nonnull must not be a null pointer.
/// The arguments are still on the interpreter stack, so each one is
/// peeked at its offset from the top (ArgSize - Offset).
static bool CheckNonNullArgs(InterpState &S, CodePtr OpPC, const Function *F,
                             const CallExpr *CE, unsigned ArgSize) {
  auto Args = ArrayRef(CE->getArgs(), CE->getNumArgs());
  auto NonNullArgs = collectNonNullArgs(F: F->getDecl(), Args);
  unsigned Offset = 0;
  unsigned Index = 0;
  for (const Expr *Arg : Args) {
    if (NonNullArgs[Index] && Arg->getType()->isPointerType()) {
      const Pointer &ArgPtr = S.Stk.peek<Pointer>(Offset: ArgSize - Offset);
      if (ArgPtr.isZero()) {
        const SourceLocation &Loc = S.Current->getLocation(PC: OpPC);
        S.CCEDiag(Loc, DiagId: diag::note_non_null_attribute_failed);
        return false;
      }
    }

    // Advance past this argument's slot; unclassifiable types occupy a
    // pointer-sized slot.
    Offset += align(Size: primSize(Type: S.Ctx.classify(E: Arg).value_or(PT: PT_Ptr)));
    ++Index;
  }
  return true;
}
1255
/// Invoke the (non-trivial) destructor of the record at \p BasePtr.
/// Pushes the pointer and performs a regular interpreter Call to the
/// compiled destructor function. Diagnoses destroying an object that is
/// currently being destroyed (double destroy).
static bool runRecordDestructor(InterpState &S, CodePtr OpPC,
                                const Pointer &BasePtr,
                                const Descriptor *Desc) {
  assert(Desc->isRecord());
  const Record *R = Desc->ElemRecord;
  assert(R);

  // If we're already inside a destructor for this very object, this is a
  // double destroy.
  if (!S.Current->isBottomFrame() && S.Current->hasThisPointer() &&
      S.Current->getFunction()->isDestructor() &&
      Pointer::pointToSameBlock(A: BasePtr, B: S.Current->getThis())) {
    const SourceInfo &Loc = S.Current->getSource(PC: OpPC);
    S.FFDiag(SI: Loc, DiagId: diag::note_constexpr_double_destroy);
    return false;
  }

  // Destructor of this record.
  const CXXDestructorDecl *Dtor = R->getDestructor();
  assert(Dtor);
  assert(!Dtor->isTrivial());
  const Function *DtorFunc = S.getContext().getOrCreateFunction(FuncDecl: Dtor);
  if (!DtorFunc)
    return false;

  S.Stk.push<Pointer>(Args: BasePtr);
  return Call(S, OpPC, Func: DtorFunc, VarArgSize: 0);
}
1282
/// Run the destructors for the contents of block \p B, as needed before
/// deallocating it: nothing for primitives, element-wise in reverse order
/// for composite arrays, and a single record destructor otherwise.
static bool RunDestructors(InterpState &S, CodePtr OpPC, const Block *B) {
  assert(B);
  const Descriptor *Desc = B->getDescriptor();

  // Primitives have no destructors to run.
  if (Desc->isPrimitive() || Desc->isPrimitiveArray())
    return true;

  assert(Desc->isRecord() || Desc->isCompositeArray());

  if (Desc->hasTrivialDtor())
    return true;

  if (Desc->isCompositeArray()) {
    unsigned N = Desc->getNumElems();
    if (N == 0)
      return true;
    const Descriptor *ElemDesc = Desc->ElemDesc;
    assert(ElemDesc->isRecord());

    // Destroy elements in reverse construction order.
    Pointer RP(const_cast<Block *>(B));
    for (int I = static_cast<int>(N) - 1; I >= 0; --I) {
      if (!runRecordDestructor(S, OpPC, BasePtr: RP.atIndex(Idx: I).narrow(), Desc: ElemDesc))
        return false;
    }
    return true;
  }

  assert(Desc->isRecord());
  return runRecordDestructor(S, OpPC, BasePtr: Pointer(const_cast<Block *>(B)), Desc);
}
1313
1314static bool hasVirtualDestructor(QualType T) {
1315 if (const CXXRecordDecl *RD = T->getAsCXXRecordDecl())
1316 if (const CXXDestructorDecl *DD = RD->getDestructor())
1317 return DD->isVirtual();
1318 return false;
1319}
1320
/// Opcode implementing a delete-expression (or deallocation function call).
/// Pops the pointer to delete, validates the deletion (matching new/delete
/// forms, full-object pointer, heap origin, replaceable operator delete),
/// runs destructors, and finally releases the allocation.
bool Free(InterpState &S, CodePtr OpPC, bool DeleteIsArrayForm,
          bool IsGlobalDelete) {
  if (!CheckDynamicMemoryAllocation(S, OpPC))
    return false;

  DynamicAllocator &Allocator = S.getAllocator();

  const Expr *Source = nullptr;
  const Block *BlockToDelete = nullptr;
  {
    // Extra scope for this so the block doesn't have this pointer
    // pointing to it when we destroy it.
    Pointer Ptr = S.Stk.pop<Pointer>();

    // Deleting nullptr is always fine.
    if (Ptr.isZero())
      return true;

    // Remove base casts.
    QualType InitialType = Ptr.getType();
    Ptr = Ptr.stripBaseCasts();

    Source = Ptr.getDeclDesc()->asExpr();
    BlockToDelete = Ptr.block();

    // Check that new[]/delete[] or new/delete were used, not a mixture.
    const Descriptor *BlockDesc = BlockToDelete->getDescriptor();
    if (std::optional<DynamicAllocator::Form> AllocForm =
            Allocator.getAllocationForm(Source)) {
      DynamicAllocator::Form DeleteForm =
          DeleteIsArrayForm ? DynamicAllocator::Form::Array
                            : DynamicAllocator::Form::NonArray;
      if (!CheckNewDeleteForms(S, OpPC, AllocForm: *AllocForm, DeleteForm, D: BlockDesc,
                               NewExpr: Source))
        return false;
    }

    // For the non-array case, the types must match if the static type
    // does not have a virtual destructor.
    if (!DeleteIsArrayForm && Ptr.getType() != InitialType &&
        !hasVirtualDestructor(T: InitialType)) {
      S.FFDiag(SI: S.Current->getSource(PC: OpPC),
               DiagId: diag::note_constexpr_delete_base_nonvirt_dtor)
          << InitialType << Ptr.getType();
      return false;
    }

    // Only complete objects may be deleted: the pointer must be the root of
    // its allocation, not one-past-the-end, and not an interior array element.
    if (!Ptr.isRoot() || (Ptr.isOnePastEnd() && !Ptr.isZeroSizeArray()) ||
        (Ptr.isArrayElement() && Ptr.getIndex() != 0)) {
      const SourceInfo &Loc = S.Current->getSource(PC: OpPC);
      S.FFDiag(SI: Loc, DiagId: diag::note_constexpr_delete_subobject)
          << Ptr.toDiagnosticString(Ctx: S.getASTContext()) << Ptr.isOnePastEnd();
      return false;
    }

    if (!CheckDeleteSource(S, OpPC, Source, Ptr))
      return false;

    // For a class type with a virtual destructor, the selected operator delete
    // is the one looked up when building the destructor.
    if (!DeleteIsArrayForm && !IsGlobalDelete) {
      QualType AllocType = Ptr.getType();
      auto getVirtualOperatorDelete = [](QualType T) -> const FunctionDecl * {
        if (const CXXRecordDecl *RD = T->getAsCXXRecordDecl())
          if (const CXXDestructorDecl *DD = RD->getDestructor())
            return DD->isVirtual() ? DD->getOperatorDelete() : nullptr;
        return nullptr;
      };

      if (const FunctionDecl *VirtualDelete =
              getVirtualOperatorDelete(AllocType);
          VirtualDelete &&
          !VirtualDelete
               ->isUsableAsGlobalAllocationFunctionInConstantEvaluation()) {
        S.FFDiag(SI: S.Current->getSource(PC: OpPC),
                 DiagId: diag::note_constexpr_new_non_replaceable)
            << isa<CXXMethodDecl>(Val: VirtualDelete) << VirtualDelete;
        return false;
      }
    }
  }
  assert(Source);
  assert(BlockToDelete);

  // Invoke destructors before deallocating the memory.
  if (!RunDestructors(S, OpPC, B: BlockToDelete))
    return false;

  if (!Allocator.deallocate(Source, BlockToDelete, S)) {
    // Nothing has been deallocated, this must be a double-delete.
    const SourceInfo &Loc = S.Current->getSource(PC: OpPC);
    S.FFDiag(SI: Loc, DiagId: diag::note_constexpr_double_delete);
    return false;
  }

  return true;
}
1418
/// Diagnose storing a value outside the representable range of the unscoped
/// enum \p ED (C++ [dcl.enum]). getValueRange() yields an exclusive upper
/// bound, hence the decrement of Max. Signed comparison is used for enums
/// with negative values, unsigned otherwise.
void diagnoseEnumValue(InterpState &S, CodePtr OpPC, const EnumDecl *ED,
                       const APSInt &Value) {
  llvm::APInt Min;
  llvm::APInt Max;
  ED->getValueRange(Max, Min);
  // Max is one past the largest representable value.
  --Max;

  if (ED->getNumNegativeBits() &&
      (Max.slt(RHS: Value.getSExtValue()) || Min.sgt(RHS: Value.getSExtValue()))) {
    const SourceLocation &Loc = S.Current->getLocation(PC: OpPC);
    S.CCEDiag(Loc, DiagId: diag::note_constexpr_unscoped_enum_out_of_range)
        << llvm::toString(I: Value, Radix: 10) << Min.getSExtValue() << Max.getSExtValue()
        << ED;
  } else if (!ED->getNumNegativeBits() && Max.ult(RHS: Value.getZExtValue())) {
    const SourceLocation &Loc = S.Current->getLocation(PC: OpPC);
    S.CCEDiag(Loc, DiagId: diag::note_constexpr_unscoped_enum_out_of_range)
        << llvm::toString(I: Value, Radix: 10) << Min.getZExtValue() << Max.getZExtValue()
        << ED;
  }
}
1439
/// Diagnose use of a non-literal type in a constant expression (pre-C++23;
/// P2448 removed this restriction in C++23). Constructor calls for
/// subobjects of the declaration currently being evaluated are allowed per
/// the C++1y rules described below.
bool CheckLiteralType(InterpState &S, CodePtr OpPC, const Type *T) {
  assert(T);
  assert(!S.getLangOpts().CPlusPlus23);

  // C++1y: A constant initializer for an object o [...] may also invoke
  // constexpr constructors for o and its subobjects even if those objects
  // are of non-literal class types.
  //
  // C++11 missed this detail for aggregates, so classes like this:
  //   struct foo_t { union { int i; volatile int j; } u; };
  // are not (obviously) initializable like so:
  //   __attribute__((__require_constant_initialization__))
  //   static const foo_t x = {{0}};
  // because "i" is a subobject with non-literal initialization (due to the
  // volatile member of the union). See:
  //   http://www.open-std.org/jtc1/sc22/wg21/docs/cwg_active.html#1677
  // Therefore, we use the C++1y behavior.

  if (!S.Current->isBottomFrame() &&
      S.Current->getFunction()->isConstructor() &&
      S.Current->getThis().getDeclDesc()->asDecl() == S.EvaluatingDecl) {
    return true;
  }

  const Expr *E = S.Current->getExpr(PC: OpPC);
  if (S.getLangOpts().CPlusPlus11)
    S.FFDiag(E, DiagId: diag::note_constexpr_nonliteral) << E->getType();
  else
    S.FFDiag(E, DiagId: diag::note_invalid_subexpr_in_const_expr);
  return false;
}
1471
/// Common implementation for GetPtrField/GetPtrFieldPop: compute a pointer
/// to the field at byte offset \p Off within \p Ptr and push it, after
/// validating the base pointer (null, range, array decay, subobject).
static bool getField(InterpState &S, CodePtr OpPC, const Pointer &Ptr,
                     uint32_t Off) {
  if (S.getLangOpts().CPlusPlus && S.inConstantContext() &&
      !CheckNull(S, OpPC, Ptr, CSK: CSK_Field))
    return false;

  if (!CheckRange(S, OpPC, Ptr, CSK: CSK_Field))
    return false;
  if (!CheckArray(S, OpPC, Ptr))
    return false;
  if (!CheckSubobject(S, OpPC, Ptr, CSK: CSK_Field))
    return false;

  // Integral pointers carry only an address; apply the offset numerically.
  if (Ptr.isIntegralPointer()) {
    if (std::optional<IntPointer> IntPtr =
            Ptr.asIntPointer().atOffset(ASTCtx: S.getASTContext(), Offset: Off)) {
      S.Stk.push<Pointer>(Args: std::move(*IntPtr));
      return true;
    }
    return false;
  }

  if (!Ptr.isBlockPointer()) {
    // FIXME: The only time we (seem to) get here is when trying to access a
    // field of a typeid pointer. In that case, we're supposed to diagnose e.g.
    // `typeid(int).name`, but we currently diagnose `&typeid(int)`.
    S.FFDiag(SI: S.Current->getSource(PC: OpPC),
             DiagId: diag::note_constexpr_access_unreadable_object)
        << AK_Read << Ptr.toDiagnosticString(Ctx: S.getASTContext());
    return false;
  }

  // We can't get the field of something that's not a record.
  if (!Ptr.getFieldDesc()->isRecord())
    return false;

  // Reject field offsets that would point past the end of the block.
  if ((Ptr.getByteOffset() + Off) >= Ptr.block()->getSize())
    return false;

  S.Stk.push<Pointer>(Args: Ptr.atField(Off));
  return true;
}
1514
1515bool GetPtrField(InterpState &S, CodePtr OpPC, uint32_t Off) {
1516 const auto &Ptr = S.Stk.peek<Pointer>();
1517 return getField(S, OpPC, Ptr, Off);
1518}
1519
1520bool GetPtrFieldPop(InterpState &S, CodePtr OpPC, uint32_t Off) {
1521 const auto &Ptr = S.Stk.pop<Pointer>();
1522 return getField(S, OpPC, Ptr, Off);
1523}
1524
/// Check that the constructor \p Func may be invoked on the object at
/// \p ThisPtr. Rejects constructors of invalid classes and classes with
/// virtual bases (the interpreter does not support those).
static bool checkConstructor(InterpState &S, CodePtr OpPC, const Function *Func,
                             const Pointer &ThisPtr) {
  assert(Func->isConstructor());

  if (Func->getParentDecl()->isInvalidDecl())
    return false;

  const Descriptor *D = ThisPtr.getFieldDesc();
  // FIXME: I think this case is not 100% correct. E.g. a pointer into a
  // subobject of a composite array.
  if (!D->ElemRecord)
    return true;

  if (D->ElemRecord->getNumVirtualBases() == 0)
    return true;

  S.FFDiag(Loc: S.Current->getLocation(PC: OpPC), DiagId: diag::note_constexpr_virtual_base)
      << Func->getParentDecl();
  return false;
}
1545
/// Check that the destructor may be called on the object at \p Ptr: the
/// object must be live, non-static, in range, within its lifetime, and
/// active; calling a dtor on a global is diagnosed as modifying a global.
bool CheckDestructor(InterpState &S, CodePtr OpPC, const Pointer &Ptr) {
  if (!CheckLive(S, OpPC, Ptr, AK: AK_Destroy))
    return false;
  if (!CheckTemporary(S, OpPC, B: Ptr.block(), AK: AK_Destroy))
    return false;
  if (!CheckRange(S, OpPC, Ptr, AK: AK_Destroy))
    return false;
  if (!CheckLifetime(S, OpPC, LT: Ptr.getLifetime(), AK: AK_Destroy))
    return false;

  // Can't call a dtor on a global variable.
  if (Ptr.block()->isStatic()) {
    const SourceInfo &E = S.Current->getSource(PC: OpPC);
    S.FFDiag(SI: E, DiagId: diag::note_constexpr_modify_global);
    return false;
  }
  return CheckActive(S, OpPC, Ptr, AK: AK_Destroy);
}
1564
/// Opcode. Check if the function decl can be called at compile time.
/// Mirrors CheckCallable() but operates directly on a FunctionDecl
/// (used for calls not going through a compiled Function).
bool CheckFunctionDecl(InterpState &S, CodePtr OpPC, const FunctionDecl *FD) {
  // In potential-constant-expression mode, don't descend into nested calls.
  if (S.checkingPotentialConstantExpression() && S.Current->getDepth() != 0)
    return false;

  const FunctionDecl *Definition = nullptr;
  const Stmt *Body = FD->getBody(Definition);

  // A defined function with a body is callable if it is constexpr, or
  // carries [[msvc::constexpr]] where that is honored.
  if (Definition && Body &&
      (Definition->isConstexpr() || (S.Current->MSVCConstexprAllowed &&
                                     Definition->hasAttr<MSConstexprAttr>())))
    return true;

  return diagnoseCallableDecl(S, OpPC, DiagDecl: FD);
}
1580
/// Check a pointer (bit)cast to \p TargetType of the block pointer on top
/// of the stack. Diagnoses casts from 'void*' (allowed since C++26 for
/// similar types, and for std::allocator::allocate) and record-to-record
/// reinterpretation; null, non-block, and integer-target casts pass.
bool CheckBitCast(InterpState &S, CodePtr OpPC, const Type *TargetType,
                  bool SrcIsVoidPtr) {
  const auto &Ptr = S.Stk.peek<Pointer>();
  if (Ptr.isZero())
    return true;
  if (!Ptr.isBlockPointer())
    return true;

  if (TargetType->isIntegerType())
    return true;

  if (SrcIsVoidPtr && S.getLangOpts().CPlusPlus) {
    bool HasValidResult = !Ptr.isZero();

    if (HasValidResult) {
      // std::allocator::allocate() results may be cast freely.
      if (S.getStdAllocatorCaller(Name: "allocate"))
        return true;

      const auto *E = cast<CastExpr>(Val: S.Current->getExpr(PC: OpPC));
      // C++26 (P2738): casting void* back to a similar type is allowed.
      if (S.getLangOpts().CPlusPlus26 &&
          S.getASTContext().hasSimilarType(T1: Ptr.getType(),
                                           T2: QualType(TargetType, 0)))
        return true;

      S.CCEDiag(E, DiagId: diag::note_constexpr_invalid_void_star_cast)
          << E->getSubExpr()->getType() << S.getLangOpts().CPlusPlus26
          << Ptr.getType().getCanonicalType() << E->getType()->getPointeeType();
    } else if (!S.getLangOpts().CPlusPlus26) {
      const SourceInfo &E = S.Current->getSource(PC: OpPC);
      S.CCEDiag(SI: E, DiagId: diag::note_constexpr_invalid_cast)
          << diag::ConstexprInvalidCastKind::CastFrom << "'void *'"
          << S.Current->getRange(PC: OpPC);
    }
  }

  // Reinterpreting one record type as an unrelated record type is invalid.
  QualType PtrType = Ptr.getType();
  if (PtrType->isRecordType() &&
      PtrType->getAsRecordDecl() != TargetType->getAsRecordDecl()) {
    S.CCEDiag(SI: S.Current->getSource(PC: OpPC), DiagId: diag::note_constexpr_invalid_cast)
        << diag::ConstexprInvalidCastKind::ThisConversionOrReinterpret
        << S.getLangOpts().CPlusPlus << S.Current->getRange(PC: OpPC);
    return false;
  }
  return true;
}
1626
1627static void compileFunction(InterpState &S, const Function *Func) {
1628 const FunctionDecl *Definition = Func->getDecl()->getDefinition();
1629 if (!Definition)
1630 return;
1631
1632 Compiler<ByteCodeEmitter>(S.getContext(), S.P)
1633 .compileFunc(FuncDecl: Definition, Func: const_cast<Function *>(Func));
1634}
1635
/// Call a function with a variable number of arguments (VarArgSize extra
/// bytes of arguments on the stack). Validates the 'this' pointer if any,
/// ensures the callee is compiled and callable, then runs it in a new
/// interpreter frame.
bool CallVar(InterpState &S, CodePtr OpPC, const Function *Func,
             uint32_t VarArgSize) {
  if (Func->hasThisPointer()) {
    // The 'this' pointer sits below the arguments (and below the RVO
    // pointer, if present) on the stack.
    size_t ArgSize = Func->getArgSize() + VarArgSize;
    size_t ThisOffset = ArgSize - (Func->hasRVO() ? primSize(Type: PT_Ptr) : 0);
    const Pointer &ThisPtr = S.Stk.peek<Pointer>(Offset: ThisOffset);

    // If the current function is a lambda static invoker and
    // the function we're about to call is a lambda call operator,
    // skip the CheckInvoke, since the ThisPtr is a null pointer
    // anyway.
    if (!(S.Current->getFunction() &&
          S.Current->getFunction()->isLambdaStaticInvoker() &&
          Func->isLambdaCallOperator())) {
      if (!CheckInvoke(S, OpPC, Ptr: ThisPtr))
        return false;
    }

    if (S.checkingPotentialConstantExpression())
      return false;
  }

  // Bytecode bodies are compiled lazily.
  if (!Func->isFullyCompiled())
    compileFunction(S, Func);

  if (!CheckCallable(S, OpPC, F: Func))
    return false;

  if (!CheckCallDepth(S, OpPC))
    return false;

  auto NewFrame = std::make_unique<InterpFrame>(args&: S, args&: Func, args&: OpPC, args&: VarArgSize);
  InterpFrame *FrameBefore = S.Current;
  S.Current = NewFrame.get();

  // Note that we cannot assert(CallResult.hasValue()) here since
  // Ret() above only sets the APValue if the curent frame doesn't
  // have a caller set.
  if (Interpret(S)) {
    NewFrame.release(); // Frame was delete'd already.
    assert(S.Current == FrameBefore);
    return true;
  }

  // Interpreting the function failed somehow. Reset to
  // previous state.
  S.Current = FrameBefore;
  return false;
}
/// Call the given function in a new interpreter frame. Performs all
/// pre-call checks ('this' validity, constructor/destructor rules,
/// callability, call depth), tracks constructor/destructor targets in
/// S.InitializingBlocks for virtual-dispatch purposes, and restores the
/// previous frame on failure.
bool Call(InterpState &S, CodePtr OpPC, const Function *Func,
          uint32_t VarArgSize) {

  // C doesn't have constexpr functions.
  if (!S.getLangOpts().CPlusPlus)
    return Invalid(S, OpPC);

  assert(Func);
  // Shared failure path: pop the call arguments before returning false.
  auto cleanup = [&]() -> bool {
    cleanupAfterFunctionCall(S, OpPC, Func);
    return false;
  };

  if (Func->hasThisPointer()) {
    size_t ArgSize = Func->getArgSize() + VarArgSize;
    size_t ThisOffset = ArgSize - (Func->hasRVO() ? primSize(Type: PT_Ptr) : 0);

    const Pointer &ThisPtr = S.Stk.peek<Pointer>(Offset: ThisOffset);

    // C++23 [expr.const]p5.6
    //   an invocation of a virtual function ([class.virtual]) for an object whose
    //   dynamic type is constexpr-unknown;
    if (ThisPtr.isDummy() && Func->isVirtual())
      return false;

    // If the current function is a lambda static invoker and
    // the function we're about to call is a lambda call operator,
    // skip the CheckInvoke, since the ThisPtr is a null pointer
    // anyway.
    if (S.Current->getFunction() &&
        S.Current->getFunction()->isLambdaStaticInvoker() &&
        Func->isLambdaCallOperator()) {
      assert(ThisPtr.isZero());
    } else {
      if (!CheckInvoke(S, OpPC, Ptr: ThisPtr))
        return cleanup();
      // Ctors/dtors may legitimately run on the inactive member of a union.
      if (!Func->isConstructor() && !Func->isDestructor() &&
          !CheckActive(S, OpPC, Ptr: ThisPtr, AK: AK_MemberCall))
        return false;
    }

    if (Func->isConstructor() && !checkConstructor(S, OpPC, Func, ThisPtr))
      return false;
    if (Func->isDestructor() && !CheckDestructor(S, OpPC, Ptr: ThisPtr))
      return false;

    // Record the object under construction/destruction; virtual dispatch
    // uses the static type while this block is listed (see CallVirt).
    if (Func->isConstructor() || Func->isDestructor())
      S.InitializingBlocks.push_back(Elt: ThisPtr.block());
  }

  // Bytecode bodies are compiled lazily.
  if (!Func->isFullyCompiled())
    compileFunction(S, Func);

  if (!CheckCallable(S, OpPC, F: Func))
    return cleanup();

  // Do not evaluate any function calls in checkingPotentialConstantExpression
  // mode. Constructors will be aborted later when their initializers are
  // evaluated.
  if (S.checkingPotentialConstantExpression() && !Func->isConstructor())
    return false;

  if (!CheckCallDepth(S, OpPC))
    return cleanup();

  auto NewFrame = std::make_unique<InterpFrame>(args&: S, args&: Func, args&: OpPC, args&: VarArgSize);
  InterpFrame *FrameBefore = S.Current;
  S.Current = NewFrame.get();

  // Immediate (consteval) callees force a constant context for their body.
  InterpStateCCOverride CCOverride(S, Func->isImmediate());
  // Note that we cannot assert(CallResult.hasValue()) here since
  // Ret() above only sets the APValue if the curent frame doesn't
  // have a caller set.
  bool Success = Interpret(S);
  // Remove initializing block again.
  if (Func->isConstructor() || Func->isDestructor())
    S.InitializingBlocks.pop_back();

  if (!Success) {
    // Interpreting the function failed somehow. Reset to
    // previous state.
    S.Current = FrameBefore;
    return false;
  }

  NewFrame.release(); // Frame was delete'd already.
  assert(S.Current == FrameBefore);
  return true;
}
1774
/// Determine the dynamic class of the object \p TypePtr points to, for
/// virtual dispatch. Fails (with a diagnostic) when the pointee is a
/// non-constexpr variable whose dynamic type is constexpr-unknown.
static bool GetDynamicDecl(InterpState &S, CodePtr OpPC, Pointer TypePtr,
                           const CXXRecordDecl *&DynamicDecl) {
  // Strip derived-to-base adjustments to reach the most-derived object.
  TypePtr = TypePtr.stripBaseCasts();

  QualType DynamicType = TypePtr.getType();
  if (TypePtr.isStatic() || TypePtr.isConst()) {
    // A non-constexpr variable's dynamic type is constexpr-unknown
    // (C++23 [expr.const]); diagnose the polymorphic use.
    if (const VarDecl *VD = TypePtr.getDeclDesc()->asVarDecl();
        VD && !VD->isConstexpr()) {
      const Expr *E = S.Current->getExpr(PC: OpPC);
      APValue V = TypePtr.toAPValue(ASTCtx: S.getASTContext());
      QualType TT = S.getASTContext().getLValueReferenceType(T: DynamicType);
      S.FFDiag(E, DiagId: diag::note_constexpr_polymorphic_unknown_dynamic_type)
          << AccessKinds::AK_MemberCall << V.getAsString(Ctx: S.getASTContext(), Ty: TT);
      return false;
    }
  }

  // Extract the record decl from pointer/reference, array, or plain type.
  if (DynamicType->isPointerType() || DynamicType->isReferenceType()) {
    DynamicDecl = DynamicType->getPointeeCXXRecordDecl();
  } else if (DynamicType->isArrayType()) {
    const Type *ElemType = DynamicType->getPointeeOrArrayElementType();
    assert(ElemType);
    DynamicDecl = ElemType->getAsCXXRecordDecl();
  } else {
    DynamicDecl = DynamicType->getAsCXXRecordDecl();
  }
  return true;
}
1803
/// Perform a virtual call: resolve the overrider from the dynamic type of
/// the 'this' object, diagnose pure-virtual and pre-C++20 virtual calls,
/// invoke the resolved function, and adjust the result for covariant
/// return types.
bool CallVirt(InterpState &S, CodePtr OpPC, const Function *Func,
              uint32_t VarArgSize) {
  assert(Func->hasThisPointer());
  assert(Func->isVirtual());
  size_t ArgSize = Func->getArgSize() + VarArgSize;
  size_t ThisOffset = ArgSize - (Func->hasRVO() ? primSize(Type: PT_Ptr) : 0);
  Pointer &ThisPtr = S.Stk.peek<Pointer>(Offset: ThisOffset);
  const FunctionDecl *Callee = Func->getDecl();

  const CXXRecordDecl *DynamicDecl = nullptr;
  if (!GetDynamicDecl(S, OpPC, TypePtr: ThisPtr, DynamicDecl))
    return false;
  assert(DynamicDecl);

  const auto *StaticDecl = cast<CXXRecordDecl>(Val: Func->getParentDecl());
  const auto *InitialFunction = cast<CXXMethodDecl>(Val: Callee);
  const CXXMethodDecl *Overrider;

  // While the object is under construction/destruction (tracked in
  // InitializingBlocks), dispatch uses the static type.
  if (StaticDecl != DynamicDecl &&
      !llvm::is_contained(Range&: S.InitializingBlocks, Element: ThisPtr.block())) {
    if (!DynamicDecl->isDerivedFrom(Base: StaticDecl))
      return false;
    Overrider = S.getContext().getOverridingFunction(DynamicDecl, StaticDecl,
                                                     InitialFunction);

  } else {
    Overrider = InitialFunction;
  }

  // C++2a [class.abstract]p6:
  //   the effect of making a virtual call to a pure virtual function [...] is
  //   undefined
  if (Overrider->isPureVirtual()) {
    S.FFDiag(SI: S.Current->getSource(PC: OpPC), DiagId: diag::note_constexpr_pure_virtual_call,
             ExtraNotes: 1)
        << Callee;
    S.Note(Loc: Callee->getLocation(), DiagId: diag::note_declared_at);
    return false;
  }

  if (Overrider != InitialFunction) {
    // DR1872: An instantiated virtual constexpr function can't be called in a
    // constant expression (prior to C++20). We can still constant-fold such a
    // call.
    if (!S.getLangOpts().CPlusPlus20 && Overrider->isVirtual()) {
      const Expr *E = S.Current->getExpr(PC: OpPC);
      S.CCEDiag(E, DiagId: diag::note_constexpr_virtual_call) << E->getSourceRange();
    }

    Func = S.getContext().getOrCreateFunction(FuncDecl: Overrider);

    const CXXRecordDecl *ThisFieldDecl =
        ThisPtr.getFieldDesc()->getType()->getAsCXXRecordDecl();
    if (Func->getParentDecl()->isDerivedFrom(Base: ThisFieldDecl)) {
      // If the function we call is further DOWN the hierarchy than the
      // FieldDesc of our pointer, just go up the hierarchy of this field
      // the furthest we can go.
      ThisPtr = ThisPtr.stripBaseCasts();
    }
  }

  if (!Call(S, OpPC, Func, VarArgSize))
    return false;

  // Covariant return types. The return type of Overrider is a pointer
  // or reference to a class type.
  if (Overrider != InitialFunction &&
      Overrider->getReturnType()->isPointerOrReferenceType() &&
      InitialFunction->getReturnType()->isPointerOrReferenceType()) {
    QualType OverriderPointeeType =
        Overrider->getReturnType()->getPointeeType();
    QualType InitialPointeeType =
        InitialFunction->getReturnType()->getPointeeType();
    // We've called Overrider above, but calling code expects us to return what
    // InitialFunction returned. According to the rules for covariant return
    // types, what InitialFunction returns needs to be a base class of what
    // Overrider returns. So, we need to do an upcast here.
    unsigned Offset = S.getContext().collectBaseOffset(
        BaseDecl: InitialPointeeType->getAsRecordDecl(),
        DerivedDecl: OverriderPointeeType->getAsRecordDecl());
    return GetPtrBasePop(S, OpPC, Off: Offset, /*IsNullOK=*/NullOK: true);
  }

  return true;
}
1889
1890bool CallBI(InterpState &S, CodePtr OpPC, const CallExpr *CE,
1891 uint32_t BuiltinID) {
1892 // A little arbitrary, but the current interpreter allows evaluation
1893 // of builtin functions in this mode, with some exceptions.
1894 if (BuiltinID == Builtin::BI__builtin_operator_new &&
1895 S.checkingPotentialConstantExpression())
1896 return false;
1897
1898 return InterpretBuiltin(S, OpPC, Call: CE, BuiltinID);
1899}
1900
/// Performs an indirect call through a function pointer popped from the
/// stack: validates the pointer, checks type compatibility with the call
/// expression, then dispatches to Call or CallVirt.
bool CallPtr(InterpState &S, CodePtr OpPC, uint32_t ArgSize,
             const CallExpr *CE) {
  const Pointer &Ptr = S.Stk.pop<Pointer>();

  if (Ptr.isZero()) {
    S.FFDiag(SI: S.Current->getSource(PC: OpPC), DiagId: diag::note_constexpr_null_callee)
        << const_cast<Expr *>(CE->getCallee()) << CE->getSourceRange();
    return false;
  }

  if (!Ptr.isFunctionPointer())
    return Invalid(S, OpPC);

  const FunctionPointer &FuncPtr = Ptr.asFunctionPointer();
  const Function *F = FuncPtr.getFunction();
  assert(F);
  // Don't allow calling block pointers.
  if (!F->getDecl())
    return Invalid(S, OpPC);

  // This happens when the call expression has been cast to
  // something else, but we don't support that.
  if (S.Ctx.classify(T: F->getDecl()->getReturnType()) !=
      S.Ctx.classify(T: CE->getCallReturnType(Ctx: S.getASTContext())))
    return false;

  // Check argument nullability state.
  if (F->hasNonNullAttr()) {
    if (!CheckNonNullArgs(S, OpPC, F, CE, ArgSize))
      return false;
  }

  // Can happen when casting function pointers around.
  QualType CalleeType = CE->getCallee()->getType();
  if (CalleeType->isPointerType() &&
      !S.getASTContext().hasSameFunctionTypeIgnoringExceptionSpec(
          T: F->getDecl()->getType(), U: CalleeType->getPointeeType())) {
    return false;
  }

  // We need to compile (and check) early for function pointer calls
  // because the Call/CallVirt below might access the instance pointer
  // but the Function's information about them is wrong.
  if (!F->isFullyCompiled())
    compileFunction(S, Func: F);

  if (!CheckCallable(S, OpPC, F))
    return false;

  assert(ArgSize >= F->getWrittenArgSize());
  // Everything beyond the written arguments is vararg data.
  uint32_t VarArgSize = ArgSize - F->getWrittenArgSize();

  // We need to do this explicitly here since we don't have the necessary
  // information to do it automatically.
  if (F->isThisPointerExplicit())
    VarArgSize -= align(Size: primSize(Type: PT_Ptr));

  if (F->isVirtual())
    return CallVirt(S, OpPC, Func: F, VarArgSize);

  return Call(S, OpPC, Func: F, VarArgSize);
}
1963
1964static void startLifetimeRecurse(const Pointer &Ptr) {
1965 if (const Record *R = Ptr.getRecord()) {
1966 Ptr.startLifetime();
1967 for (const Record::Field &Fi : R->fields())
1968 startLifetimeRecurse(Ptr: Ptr.atField(Off: Fi.Offset));
1969 return;
1970 }
1971
1972 if (const Descriptor *FieldDesc = Ptr.getFieldDesc();
1973 FieldDesc->isCompositeArray()) {
1974 assert(Ptr.getLifetime() == Lifetime::Started);
1975 for (unsigned I = 0; I != FieldDesc->getNumElems(); ++I)
1976 startLifetimeRecurse(Ptr: Ptr.atIndex(Idx: I).narrow());
1977 return;
1978 }
1979
1980 Ptr.startLifetime();
1981}
1982
1983bool StartLifetime(InterpState &S, CodePtr OpPC) {
1984 const auto &Ptr = S.Stk.peek<Pointer>();
1985 if (Ptr.isBlockPointer() && !CheckDummy(S, OpPC, B: Ptr.block(), AK: AK_Destroy))
1986 return false;
1987 startLifetimeRecurse(Ptr: Ptr.narrow());
1988 return true;
1989}
1990
1991// FIXME: It might be better to the recursing as part of the generated code for
1992// a destructor?
1993static void endLifetimeRecurse(const Pointer &Ptr) {
1994 if (const Record *R = Ptr.getRecord()) {
1995 Ptr.endLifetime();
1996 for (const Record::Field &Fi : R->fields())
1997 endLifetimeRecurse(Ptr: Ptr.atField(Off: Fi.Offset));
1998 return;
1999 }
2000
2001 if (const Descriptor *FieldDesc = Ptr.getFieldDesc();
2002 FieldDesc->isCompositeArray()) {
2003 // No endLifetime() for array roots.
2004 assert(Ptr.getLifetime() == Lifetime::Started);
2005 for (unsigned I = 0; I != FieldDesc->getNumElems(); ++I)
2006 endLifetimeRecurse(Ptr: Ptr.atIndex(Idx: I).narrow());
2007 return;
2008 }
2009
2010 Ptr.endLifetime();
2011}
2012
2013/// Ends the lifetime of the peek'd pointer.
2014bool EndLifetime(InterpState &S, CodePtr OpPC) {
2015 const auto &Ptr = S.Stk.peek<Pointer>();
2016 if (Ptr.isBlockPointer() && !CheckDummy(S, OpPC, B: Ptr.block(), AK: AK_Destroy))
2017 return false;
2018
2019 endLifetimeRecurse(Ptr: Ptr.narrow());
2020 return true;
2021}
2022
2023/// Ends the lifetime of the pop'd pointer.
2024bool EndLifetimePop(InterpState &S, CodePtr OpPC) {
2025 const auto &Ptr = S.Stk.pop<Pointer>();
2026 if (Ptr.isBlockPointer() && !CheckDummy(S, OpPC, B: Ptr.block(), AK: AK_Destroy))
2027 return false;
2028
2029 endLifetimeRecurse(Ptr: Ptr.narrow());
2030 return true;
2031}
2032
/// Validates a placement-new into the peek'd pointer: checks that the
/// storage is accessible/writable and that the allocated type (possibly an
/// array of \p ArraySize elements) fits and matches the storage type.
bool CheckNewTypeMismatch(InterpState &S, CodePtr OpPC, const Expr *E,
                          std::optional<uint64_t> ArraySize) {
  const Pointer &Ptr = S.Stk.peek<Pointer>();

  // Placing into a union member activates it when the direct base is the
  // union itself.
  if (Ptr.inUnion() && Ptr.getBase().getRecord()->isUnion())
    Ptr.activate();

  if (Ptr.isZero()) {
    S.FFDiag(SI: S.Current->getSource(PC: OpPC), DiagId: diag::note_constexpr_access_null)
        << AK_Construct;
    return false;
  }

  if (!Ptr.isBlockPointer())
    return false;

  if (!CheckRange(S, OpPC, Ptr, AK: AK_Construct))
    return false;

  // Placement-new (re)starts the lifetime of the storage.
  startLifetimeRecurse(Ptr);

  // Similar to CheckStore(), but with the additional CheckTemporary() call and
  // the AccessKinds are different.
  if (!Ptr.block()->isAccessible()) {
    if (!CheckExtern(S, OpPC, Ptr))
      return false;
    if (!CheckLive(S, OpPC, Ptr, AK: AK_Construct))
      return false;
    return CheckDummy(S, OpPC, B: Ptr.block(), AK: AK_Construct);
  }
  if (!CheckTemporary(S, OpPC, B: Ptr.block(), AK: AK_Construct))
    return false;

  // CheckLifetime for this and all base pointers.
  for (Pointer P = Ptr;;) {
    if (!CheckLifetime(S, OpPC, LT: P.getLifetime(), AK: AK_Construct))
      return false;

    if (P.isRoot())
      break;
    P = P.getBase();
  }

  if (!CheckRange(S, OpPC, Ptr, AK: AK_Construct))
    return false;
  if (!CheckGlobal(S, OpPC, Ptr))
    return false;
  if (!CheckConst(S, OpPC, Ptr))
    return false;
  if (!S.inConstantContext() && isConstexprUnknown(P: Ptr))
    return false;

  // Diagnose non-constexpr-usable new expressions; only returns true for
  // the placement-new forms that are allowed to continue.
  if (!InvalidNewDeleteExpr(S, OpPC, E))
    return false;

  const auto *NewExpr = cast<CXXNewExpr>(Val: E);
  QualType StorageType = Ptr.getFieldDesc()->getDataType(Ctx: S.getASTContext());
  const ASTContext &ASTCtx = S.getASTContext();
  // When an array size is given, compare against an array of that many
  // allocated elements rather than the scalar allocated type.
  QualType AllocType;
  if (ArraySize) {
    AllocType = ASTCtx.getConstantArrayType(
        EltTy: NewExpr->getAllocatedType(),
        ArySize: APInt(64, static_cast<uint64_t>(*ArraySize), false), SizeExpr: nullptr,
        ASM: ArraySizeModifier::Normal, IndexTypeQuals: 0);
  } else {
    AllocType = NewExpr->getAllocatedType();
  }

  // Non-array types count as a single element on either side.
  unsigned StorageSize = 1;
  unsigned AllocSize = 1;
  if (const auto *CAT = dyn_cast<ConstantArrayType>(Val&: AllocType))
    AllocSize = CAT->getZExtSize();
  if (const auto *CAT = dyn_cast<ConstantArrayType>(Val&: StorageType))
    StorageSize = CAT->getZExtSize();

  // The allocation must fit and the (base) element types must be similar.
  if (AllocSize > StorageSize ||
      !ASTCtx.hasSimilarType(T1: ASTCtx.getBaseElementType(QT: AllocType),
                             T2: ASTCtx.getBaseElementType(QT: StorageType))) {
    S.FFDiag(Loc: S.Current->getLocation(PC: OpPC),
             DiagId: diag::note_constexpr_placement_new_wrong_type)
        << StorageType << AllocType;
    return false;
  }

  // Can't activate fields in a union, unless the direct base is the union.
  if (Ptr.inUnion() && !Ptr.isActive() && !Ptr.getBase().getRecord()->isUnion())
    return CheckActive(S, OpPC, Ptr, AK: AK_Construct);

  return true;
}
2123
2124bool InvalidNewDeleteExpr(InterpState &S, CodePtr OpPC, const Expr *E) {
2125 assert(E);
2126
2127 if (const auto *NewExpr = dyn_cast<CXXNewExpr>(Val: E)) {
2128 const FunctionDecl *OperatorNew = NewExpr->getOperatorNew();
2129
2130 if (NewExpr->getNumPlacementArgs() > 0) {
2131 // This is allowed pre-C++26, but only an std function or if
2132 // [[msvc::constexpr]] was used.
2133 if (S.getLangOpts().CPlusPlus26 || S.Current->isStdFunction() ||
2134 S.Current->MSVCConstexprAllowed)
2135 return true;
2136
2137 S.FFDiag(SI: S.Current->getSource(PC: OpPC), DiagId: diag::note_constexpr_new_placement)
2138 << /*C++26 feature*/ 1 << E->getSourceRange();
2139 } else if (
2140 !OperatorNew
2141 ->isUsableAsGlobalAllocationFunctionInConstantEvaluation()) {
2142 S.FFDiag(SI: S.Current->getSource(PC: OpPC),
2143 DiagId: diag::note_constexpr_new_non_replaceable)
2144 << isa<CXXMethodDecl>(Val: OperatorNew) << OperatorNew;
2145 return false;
2146 } else if (!S.getLangOpts().CPlusPlus26 &&
2147 NewExpr->getNumPlacementArgs() == 1 &&
2148 !OperatorNew->isReservedGlobalPlacementOperator()) {
2149 if (!S.getLangOpts().CPlusPlus26) {
2150 S.FFDiag(SI: S.Current->getSource(PC: OpPC), DiagId: diag::note_constexpr_new_placement)
2151 << /*Unsupported*/ 0 << E->getSourceRange();
2152 return false;
2153 }
2154 return true;
2155 }
2156 } else {
2157 const auto *DeleteExpr = cast<CXXDeleteExpr>(Val: E);
2158 const FunctionDecl *OperatorDelete = DeleteExpr->getOperatorDelete();
2159 if (!OperatorDelete
2160 ->isUsableAsGlobalAllocationFunctionInConstantEvaluation()) {
2161 S.FFDiag(SI: S.Current->getSource(PC: OpPC),
2162 DiagId: diag::note_constexpr_new_non_replaceable)
2163 << isa<CXXMethodDecl>(Val: OperatorDelete) << OperatorDelete;
2164 return false;
2165 }
2166 }
2167
2168 return false;
2169}
2170
2171bool handleFixedPointOverflow(InterpState &S, CodePtr OpPC,
2172 const FixedPoint &FP) {
2173 const Expr *E = S.Current->getExpr(PC: OpPC);
2174 if (S.checkingForUndefinedBehavior()) {
2175 S.getASTContext().getDiagnostics().Report(
2176 Loc: E->getExprLoc(), DiagID: diag::warn_fixedpoint_constant_overflow)
2177 << FP.toDiagnosticString(Ctx: S.getASTContext()) << E->getType();
2178 }
2179 S.CCEDiag(E, DiagId: diag::note_constexpr_overflow)
2180 << FP.toDiagnosticString(Ctx: S.getASTContext()) << E->getType();
2181 return S.noteUndefinedBehavior();
2182}
2183
2184bool InvalidShuffleVectorIndex(InterpState &S, CodePtr OpPC, uint32_t Index) {
2185 const SourceInfo &Loc = S.Current->getSource(PC: OpPC);
2186 S.FFDiag(SI: Loc,
2187 DiagId: diag::err_shufflevector_minus_one_is_undefined_behavior_constexpr)
2188 << Index;
2189 return false;
2190}
2191
2192bool CheckPointerToIntegralCast(InterpState &S, CodePtr OpPC,
2193 const Pointer &Ptr, unsigned BitWidth) {
2194 const SourceInfo &E = S.Current->getSource(PC: OpPC);
2195 S.CCEDiag(SI: E, DiagId: diag::note_constexpr_invalid_cast)
2196 << 2 << S.getLangOpts().CPlusPlus << S.Current->getRange(PC: OpPC);
2197
2198 if (Ptr.isDummy())
2199 return false;
2200 if (Ptr.isFunctionPointer())
2201 return true;
2202
2203 if (Ptr.isBlockPointer() && !Ptr.isZero()) {
2204 // Only allow based lvalue casts if they are lossless.
2205 if (S.getASTContext().getTargetInfo().getPointerWidth(AddrSpace: LangAS::Default) !=
2206 BitWidth)
2207 return Invalid(S, OpPC);
2208 }
2209 return true;
2210}
2211
2212bool CastPointerIntegralAP(InterpState &S, CodePtr OpPC, uint32_t BitWidth) {
2213 const Pointer &Ptr = S.Stk.pop<Pointer>();
2214
2215 if (!CheckPointerToIntegralCast(S, OpPC, Ptr, BitWidth))
2216 return false;
2217
2218 auto Result = S.allocAP<IntegralAP<false>>(BitWidth);
2219 Result.copy(V: APInt(BitWidth, Ptr.getIntegerRepresentation()));
2220
2221 S.Stk.push<IntegralAP<false>>(Args&: Result);
2222 return true;
2223}
2224
2225bool CastPointerIntegralAPS(InterpState &S, CodePtr OpPC, uint32_t BitWidth) {
2226 const Pointer &Ptr = S.Stk.pop<Pointer>();
2227
2228 if (!CheckPointerToIntegralCast(S, OpPC, Ptr, BitWidth))
2229 return false;
2230
2231 auto Result = S.allocAP<IntegralAP<true>>(BitWidth);
2232 Result.copy(V: APInt(BitWidth, Ptr.getIntegerRepresentation()));
2233
2234 S.Stk.push<IntegralAP<true>>(Args&: Result);
2235 return true;
2236}
2237
2238bool CheckBitCast(InterpState &S, CodePtr OpPC, bool HasIndeterminateBits,
2239 bool TargetIsUCharOrByte) {
2240 // This is always fine.
2241 if (!HasIndeterminateBits)
2242 return true;
2243
2244 // Indeterminate bits can only be bitcast to unsigned char or std::byte.
2245 if (TargetIsUCharOrByte)
2246 return true;
2247
2248 const Expr *E = S.Current->getExpr(PC: OpPC);
2249 QualType ExprType = E->getType();
2250 S.FFDiag(E, DiagId: diag::note_constexpr_bit_cast_indet_dest)
2251 << ExprType << S.getLangOpts().CharIsSigned << E->getSourceRange();
2252 return false;
2253}
2254
2255bool GetTypeid(InterpState &S, CodePtr OpPC, const Type *TypePtr,
2256 const Type *TypeInfoType) {
2257 S.Stk.push<Pointer>(Args&: TypePtr, Args&: TypeInfoType);
2258 return true;
2259}
2260
/// Evaluates typeid() of the pop'd pointer's pointee, pushing a typeid
/// pointer for its most-derived type (or the constructor/destructor's class
/// if the object is currently under construction/destruction).
bool GetTypeidPtr(InterpState &S, CodePtr OpPC, const Type *TypeInfoType) {
  const auto &P = S.Stk.pop<Pointer>();

  if (!P.isBlockPointer())
    return false;

  // Pick the most-derived type.
  CanQualType T = P.getDeclPtr().getType()->getCanonicalTypeUnqualified();
  // ... unless we're currently constructing this object.
  // FIXME: We have a similar check to this in more places.
  if (S.Current->getFunction()) {
    // Walk the call stack looking for a constructor/destructor frame whose
    // `this` is the same block as P; if found, use that frame's class type.
    for (const InterpFrame *Frame = S.Current; Frame; Frame = Frame->Caller) {
      if (const Function *Func = Frame->getFunction();
          Func && (Func->isConstructor() || Func->isDestructor()) &&
          P.block() == Frame->getThis().block()) {
        T = S.getContext().getASTContext().getCanonicalTagType(
            TD: Func->getParentDecl());
        break;
      }
    }
  }

  S.Stk.push<Pointer>(Args: T->getTypePtr(), Args&: TypeInfoType);
  return true;
}
2286
2287bool DiagTypeid(InterpState &S, CodePtr OpPC) {
2288 const auto *E = cast<CXXTypeidExpr>(Val: S.Current->getExpr(PC: OpPC));
2289 S.CCEDiag(E, DiagId: diag::note_constexpr_typeid_polymorphic)
2290 << E->getExprOperand()->getType()
2291 << E->getExprOperand()->getSourceRange();
2292 return false;
2293}
2294
/// Returns true if the two string-literal pointers could refer to
/// overlapping storage, i.e. if one literal (from the pointed-at offset on)
/// could be a suffix of the other, including the implicit null terminator.
bool arePotentiallyOverlappingStringLiterals(const Pointer &LHS,
                                             const Pointer &RHS) {
  // Element index each pointer refers to; one-past-end counts as the full
  // element count.
  unsigned LHSOffset = LHS.isOnePastEnd() ? LHS.getNumElems() : LHS.getIndex();
  unsigned RHSOffset = RHS.isOnePastEnd() ? RHS.getNumElems() : RHS.getIndex();
  // Byte length of each literal, excluding the null terminator.
  unsigned LHSLength = (LHS.getNumElems() - 1) * LHS.elemSize();
  unsigned RHSLength = (RHS.getNumElems() - 1) * RHS.elemSize();

  StringRef LHSStr((const char *)LHS.atIndex(Idx: 0).getRawAddress(), LHSLength);
  StringRef RHSStr((const char *)RHS.atIndex(Idx: 0).getRawAddress(), RHSLength);
  // Align the two views so they start at the same logical position; if the
  // offset difference exceeds either length, they can't overlap.
  int32_t IndexDiff = RHSOffset - LHSOffset;
  if (IndexDiff < 0) {
    if (static_cast<int32_t>(LHSLength) < -IndexDiff)
      return false;
    LHSStr = LHSStr.drop_front(N: -IndexDiff);
  } else {
    if (static_cast<int32_t>(RHSLength) < IndexDiff)
      return false;
    RHSStr = RHSStr.drop_front(N: IndexDiff);
  }

  // Overlap means the shorter (aligned) view is a prefix of the longer one.
  unsigned ShorterCharWidth;
  StringRef Shorter;
  StringRef Longer;
  if (LHSLength < RHSLength) {
    ShorterCharWidth = LHS.elemSize();
    Shorter = LHSStr;
    Longer = RHSStr;
  } else {
    ShorterCharWidth = RHS.elemSize();
    Shorter = RHSStr;
    Longer = LHSStr;
  }

  // The null terminator isn't included in the string data, so check for it
  // manually. If the longer string doesn't have a null terminator where the
  // shorter string ends, they aren't potentially overlapping.
  for (unsigned NullByte : llvm::seq(Size: ShorterCharWidth)) {
    if (Shorter.size() + NullByte >= Longer.size())
      break;
    if (Longer[Shorter.size() + NullByte])
      return false;
  }
  return Shorter == Longer.take_front(N: Shorter.size());
}
2339
/// For a primitive value of type \p T stored at \p Ptr whose payload lives
/// in heap memory (multi-word APInts/floats, member-pointer paths), moves
/// that payload into memory allocated via the Program allocator S.P so it
/// outlives the evaluation. Single-word values need no copy.
static void copyPrimitiveMemory(InterpState &S, const Pointer &Ptr,
                                PrimType T) {
  if (T == PT_IntAPS) {
    auto &Val = Ptr.deref<IntegralAP<true>>();
    if (!Val.singleWord()) {
      uint64_t *NewMemory = new (S.P) uint64_t[Val.numWords()];
      Val.take(NewMemory);
    }
  } else if (T == PT_IntAP) {
    auto &Val = Ptr.deref<IntegralAP<false>>();
    if (!Val.singleWord()) {
      uint64_t *NewMemory = new (S.P) uint64_t[Val.numWords()];
      Val.take(NewMemory);
    }
  } else if (T == PT_Float) {
    auto &Val = Ptr.deref<Floating>();
    if (!Val.singleWord()) {
      uint64_t *NewMemory = new (S.P) uint64_t[Val.numWords()];
      Val.take(NewMemory);
    }
  } else if (T == PT_MemberPtr) {
    // Member pointers always carry their path out-of-line; copy it.
    auto &Val = Ptr.deref<MemberPointer>();
    unsigned PathLength = Val.getPathLength();
    auto *NewPath = new (S.P) const CXXRecordDecl *[PathLength];
    std::copy_n(first: Val.path(), n: PathLength, result: NewPath);
    Val.takePath(NewPath);
  }
}
2368
/// Statically-typed variant of copyPrimitiveMemory() above: moves the
/// heap-allocated payload of the value at \p Ptr into Program-allocated
/// memory. Only instantiated for types that may need an allocation.
template <typename T>
static void copyPrimitiveMemory(InterpState &S, const Pointer &Ptr) {
  assert(needsAlloc<T>());
  if constexpr (std::is_same_v<T, MemberPointer>) {
    // Member pointers carry their class path out-of-line; copy it.
    auto &Val = Ptr.deref<MemberPointer>();
    unsigned PathLength = Val.getPathLength();
    auto *NewPath = new (S.P) const CXXRecordDecl *[PathLength];
    std::copy_n(first: Val.path(), n: PathLength, result: NewPath);
    Val.takePath(NewPath);
  } else {
    // Multi-word APInt/float payloads move; single-word values are inline.
    auto &Val = Ptr.deref<T>();
    if (!Val.singleWord()) {
      uint64_t *NewMemory = new (S.P) uint64_t[Val.numWords()];
      Val.take(NewMemory);
    }
  }
}
2386
/// Recursively walks the object at \p Ptr and moves every primitive value
/// with a heap-allocated payload into Program-allocated memory, so a global
/// keeps valid data after evaluation finishes.
static void finishGlobalRecurse(InterpState &S, const Pointer &Ptr) {
  if (const Record *R = Ptr.getRecord()) {
    for (const Record::Field &Fi : R->fields()) {
      if (Fi.Desc->isPrimitive()) {
        TYPE_SWITCH_ALLOC(Fi.Desc->getPrimType(), {
          copyPrimitiveMemory<T>(S, Ptr.atField(Fi.Offset));
        });
      } else {
        finishGlobalRecurse(S, Ptr: Ptr.atField(Off: Fi.Offset));
      }
    }
    return;
  }

  if (const Descriptor *D = Ptr.getFieldDesc(); D && D->isArray()) {
    unsigned NumElems = D->getNumElems();
    if (NumElems == 0)
      return;

    if (D->isPrimitiveArray()) {
      PrimType PT = D->getPrimType();
      // Only AP-style primitives can own out-of-line payloads.
      if (!needsAlloc(T: PT))
        return;
      assert(NumElems >= 1);
      // Probe the first element: if it is single-word, assume the whole
      // array is (all elements share the element type) and skip the loop.
      const Pointer EP = Ptr.atIndex(Idx: 0);
      bool AllSingleWord = true;
      TYPE_SWITCH_ALLOC(PT, {
        if (!EP.deref<T>().singleWord()) {
          copyPrimitiveMemory<T>(S, EP);
          AllSingleWord = false;
        }
      });
      if (AllSingleWord)
        return;
      // Element 0 was handled above; copy the rest.
      for (unsigned I = 1; I != D->getNumElems(); ++I) {
        const Pointer EP = Ptr.atIndex(Idx: I);
        copyPrimitiveMemory(S, Ptr: EP, T: PT);
      }
    } else {
      assert(D->isCompositeArray());
      for (unsigned I = 0; I != D->getNumElems(); ++I) {
        const Pointer EP = Ptr.atIndex(Idx: I).narrow();
        finishGlobalRecurse(S, Ptr: EP);
      }
    }
  }
}
2434
2435bool FinishInitGlobal(InterpState &S, CodePtr OpPC) {
2436 const Pointer &Ptr = S.Stk.pop<Pointer>();
2437
2438 finishGlobalRecurse(S, Ptr);
2439 if (Ptr.canBeInitialized()) {
2440 Ptr.initialize();
2441 Ptr.activate();
2442 }
2443
2444 return true;
2445}
2446
/// Diagnoses an invalid cast of kind \p Kind. The return value decides
/// whether evaluation may continue (e.g. a CCEDiag-only cast can continue
/// unless \p Fatal is set).
bool InvalidCast(InterpState &S, CodePtr OpPC, CastKind Kind, bool Fatal) {
  const SourceLocation &Loc = S.Current->getLocation(PC: OpPC);

  switch (Kind) {
  case CastKind::Reinterpret:
    S.CCEDiag(Loc, DiagId: diag::note_constexpr_invalid_cast)
        << diag::ConstexprInvalidCastKind::Reinterpret
        << S.Current->getRange(PC: OpPC);
    return !Fatal;
  case CastKind::ReinterpretLike:
    S.CCEDiag(Loc, DiagId: diag::note_constexpr_invalid_cast)
        << diag::ConstexprInvalidCastKind::ThisConversionOrReinterpret
        << S.getLangOpts().CPlusPlus << S.Current->getRange(PC: OpPC);
    return !Fatal;
  case CastKind::Volatile:
    // Accessing a volatile value is never a constant expression; only emit
    // the note when not merely probing for potential constant expressions.
    if (!S.checkingPotentialConstantExpression()) {
      const auto *E = cast<CastExpr>(Val: S.Current->getExpr(PC: OpPC));
      if (S.getLangOpts().CPlusPlus)
        S.FFDiag(E, DiagId: diag::note_constexpr_access_volatile_type)
            << AK_Read << E->getSubExpr()->getType();
      else
        S.FFDiag(E);
    }

    return false;
  case CastKind::Dynamic:
    // dynamic_cast is only invalid pre-C++20; diagnose but continue.
    assert(!S.getLangOpts().CPlusPlus20);
    S.CCEDiag(Loc, DiagId: diag::note_constexpr_invalid_cast)
        << diag::ConstexprInvalidCastKind::Dynamic;
    return true;
  }
  llvm_unreachable("Unhandled CastKind");
  // Not reachable; kept to satisfy compilers that don't see the above.
  return false;
}
2481
/// Destroys local scope \p I of the current frame, first diagnosing any
/// local whose lifetime has already ended (destroying such an object is not
/// a constant expression).
bool Destroy(InterpState &S, CodePtr OpPC, uint32_t I) {
  assert(S.Current->getFunction());
  // FIXME: We iterate the scope once here and then again in the destroy() call
  // below.
  for (auto &Local : S.Current->getFunction()->getScope(Idx: I).locals_reverse()) {
    // Skip locals that were never initialized.
    if (!S.Current->getLocalBlock(Offset: Local.Offset)->isInitialized())
      continue;
    const Pointer &Ptr = S.Current->getLocalPointer(Offset: Local.Offset);
    if (Ptr.getLifetime() == Lifetime::Ended) {
      // Try to use the declaration for better diagnostics
      if (const Decl *D = Ptr.getDeclDesc()->asDecl()) {
        auto *ND = cast<NamedDecl>(Val: D);
        S.FFDiag(Loc: ND->getLocation(),
                 DiagId: diag::note_constexpr_destroy_out_of_lifetime)
            << ND->getNameAsString();
      } else {
        S.FFDiag(Loc: Ptr.getDeclDesc()->getLocation(),
                 DiagId: diag::note_constexpr_destroy_out_of_lifetime)
            << Ptr.toDiagnosticString(Ctx: S.getASTContext());
      }
      return false;
    }
  }

  S.Current->destroy(Idx: I);
  return true;
}
2509
2510// Perform a cast towards the class of the Decl (either up or down the
2511// hierarchy).
2512static bool castBackMemberPointer(InterpState &S,
2513 const MemberPointer &MemberPtr,
2514 int32_t BaseOffset,
2515 const RecordDecl *BaseDecl) {
2516 const CXXRecordDecl *Expected;
2517 if (MemberPtr.getPathLength() >= 2)
2518 Expected = MemberPtr.getPathEntry(Index: MemberPtr.getPathLength() - 2);
2519 else
2520 Expected = MemberPtr.getRecordDecl();
2521
2522 assert(Expected);
2523 if (Expected->getCanonicalDecl() != BaseDecl->getCanonicalDecl()) {
2524 // C++11 [expr.static.cast]p12: In a conversion from (D::*) to (B::*),
2525 // if B does not contain the original member and is not a base or
2526 // derived class of the class containing the original member, the result
2527 // of the cast is undefined.
2528 // C++11 [conv.mem]p2 does not cover this case for a cast from (B::*) to
2529 // (D::*). We consider that to be a language defect.
2530 return false;
2531 }
2532
2533 unsigned OldPathLength = MemberPtr.getPathLength();
2534 unsigned NewPathLength = OldPathLength - 1;
2535 bool IsDerivedMember = NewPathLength != 0;
2536 auto NewPath = S.allocMemberPointerPath(Length: NewPathLength);
2537 std::copy_n(first: MemberPtr.path(), n: NewPathLength, result: NewPath);
2538
2539 S.Stk.push<MemberPointer>(Args: MemberPtr.atInstanceBase(Offset: BaseOffset, PathLength: NewPathLength,
2540 Path: NewPath, NewIsDerived: IsDerivedMember));
2541 return true;
2542}
2543
2544static bool appendToMemberPointer(InterpState &S,
2545 const MemberPointer &MemberPtr,
2546 int32_t BaseOffset,
2547 const RecordDecl *BaseDecl,
2548 bool IsDerivedMember) {
2549 unsigned OldPathLength = MemberPtr.getPathLength();
2550 unsigned NewPathLength = OldPathLength + 1;
2551
2552 auto NewPath = S.allocMemberPointerPath(Length: NewPathLength);
2553 std::copy_n(first: MemberPtr.path(), n: OldPathLength, result: NewPath);
2554 NewPath[OldPathLength] = cast<CXXRecordDecl>(Val: BaseDecl);
2555
2556 S.Stk.push<MemberPointer>(Args: MemberPtr.atInstanceBase(Offset: BaseOffset, PathLength: NewPathLength,
2557 Path: NewPath, NewIsDerived: IsDerivedMember));
2558 return true;
2559}
2560
2561/// DerivedToBaseMemberPointer
2562bool CastMemberPtrBasePop(InterpState &S, CodePtr OpPC, int32_t Off,
2563 const RecordDecl *BaseDecl) {
2564 const auto &Ptr = S.Stk.pop<MemberPointer>();
2565
2566 if (!Ptr.isDerivedMember() && Ptr.hasPath())
2567 return castBackMemberPointer(S, MemberPtr: Ptr, BaseOffset: Off, BaseDecl);
2568
2569 bool IsDerivedMember = Ptr.isDerivedMember() || !Ptr.hasPath();
2570 return appendToMemberPointer(S, MemberPtr: Ptr, BaseOffset: Off, BaseDecl, IsDerivedMember);
2571}
2572
2573/// BaseToDerivedMemberPointer
2574bool CastMemberPtrDerivedPop(InterpState &S, CodePtr OpPC, int32_t Off,
2575 const RecordDecl *BaseDecl) {
2576 const auto &Ptr = S.Stk.pop<MemberPointer>();
2577
2578 if (!Ptr.isDerivedMember()) {
2579 // Simply append.
2580 return appendToMemberPointer(S, MemberPtr: Ptr, BaseOffset: Off, BaseDecl,
2581 /*IsDerivedMember=*/false);
2582 }
2583
2584 return castBackMemberPointer(S, MemberPtr: Ptr, BaseOffset: Off, BaseDecl);
2585}
2586
2587// https://github.com/llvm/llvm-project/issues/102513
2588#if defined(_MSC_VER) && !defined(__clang__) && !defined(NDEBUG)
2589#pragma optimize("", off)
2590#endif
/// Main interpreter loop: fetches and dispatches opcodes for the current
/// frame until an opcode handler terminates interpretation.
bool Interpret(InterpState &S) {
  // The current stack frame when we started Interpret().
  // This is being used by the ops to determine whether
  // to return from this function and thus terminate
  // interpretation.
  const InterpFrame *StartFrame = S.Current;
  assert(!S.Current->isRoot());
  CodePtr PC = S.Current->getPC();

  // Empty program.
  if (!PC)
    return true;

  for (;;) {
    // Read the opcode, remember its own PC for diagnostics, and dispatch
    // to the generated handler table.
    auto Op = PC.read<Opcode>();
    CodePtr OpPC = PC;

    switch (Op) {
#define GET_INTERP
#include "Opcodes.inc"
#undef GET_INTERP
    }
  }
}
2615// https://github.com/llvm/llvm-project/issues/102513
2616#if defined(_MSC_VER) && !defined(__clang__) && !defined(NDEBUG)
2617#pragma optimize("", on)
2618#endif
2619
2620} // namespace interp
2621} // namespace clang
2622