1//===------- Interp.cpp - Interpreter for the constexpr VM ------*- C++ -*-===//
2//
3// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
4// See https://llvm.org/LICENSE.txt for license information.
5// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
6//
7//===----------------------------------------------------------------------===//
8
9#include "Interp.h"
10#include "Compiler.h"
11#include "Function.h"
12#include "InterpFrame.h"
13#include "InterpShared.h"
14#include "InterpStack.h"
15#include "Opcode.h"
16#include "PrimType.h"
17#include "Program.h"
18#include "State.h"
19#include "clang/AST/ASTContext.h"
20#include "clang/AST/CXXInheritance.h"
21#include "clang/AST/DeclObjC.h"
22#include "clang/AST/Expr.h"
23#include "clang/AST/ExprCXX.h"
24#include "clang/Basic/DiagnosticSema.h"
25#include "clang/Basic/TargetInfo.h"
26#include "llvm/ADT/StringExtras.h"
27
28using namespace clang;
29using namespace clang::interp;
30
31static bool RetValue(InterpState &S, CodePtr &Pt) {
32 llvm::report_fatal_error(reason: "Interpreter cannot return values");
33}
34
35//===----------------------------------------------------------------------===//
36// Jmp, Jt, Jf
37//===----------------------------------------------------------------------===//
38
39static bool Jmp(InterpState &S, CodePtr &PC, int32_t Offset) {
40 PC += Offset;
41 return S.noteStep(OpPC: PC);
42}
43
44static bool Jt(InterpState &S, CodePtr &PC, int32_t Offset) {
45 if (S.Stk.pop<bool>()) {
46 PC += Offset;
47 }
48 return S.noteStep(OpPC: PC);
49}
50
51static bool Jf(InterpState &S, CodePtr &PC, int32_t Offset) {
52 if (!S.Stk.pop<bool>()) {
53 PC += Offset;
54 }
55 return S.noteStep(OpPC: PC);
56}
57
58// https://github.com/llvm/llvm-project/issues/102513
59#if defined(_MSC_VER) && !defined(__clang__) && !defined(NDEBUG)
60#pragma optimize("", off)
61#endif
62// FIXME: We have the large switch over all opcodes here again, and in
63// Interpret().
/// Implements the __builtin_constant_p opcode: speculatively interprets the
/// bytecode of the operand (up to the matching OP_EndSpeculation), then pushes
/// a 32-bit integral 1 or 0 depending on whether the operand evaluated
/// cleanly to something that counts as a constant. \p RealPC is left pointing
/// at the OP_EndSpeculation op on return.
static bool BCP(InterpState &S, CodePtr &RealPC, int32_t Offset, PrimType PT) {
  [[maybe_unused]] CodePtr PCBefore = RealPC;
  size_t StackSizeBefore = S.Stk.size();

  // Interpret on a *copy* of the PC so RealPC itself is never advanced by
  // the speculative run.
  auto SpeculativeInterp = [&S, RealPC]() -> bool {
    const InterpFrame *StartFrame = S.Current;
    CodePtr PC = RealPC;

    for (;;) {
      auto Op = PC.read<Opcode>();
      if (Op == OP_EndSpeculation)
        return true;
      CodePtr OpPC = PC;

      switch (Op) {
#define GET_INTERP
#include "Opcodes.inc"
#undef GET_INTERP
      }
    }
    llvm_unreachable("We didn't see an EndSpeculation op?");
  };

  if (SpeculativeInterp()) {
    if (PT == PT_Ptr) {
      // Whether a pointer result counts as "constant" depends on the pointer
      // itself; see CheckBCPResult.
      const auto &Ptr = S.Stk.pop<Pointer>();
      assert(S.Stk.size() == StackSizeBefore);
      S.Stk.push<Integral<32, true>>(
          Args: Integral<32, true>::from(Value: CheckBCPResult(S, Ptr)));
    } else {
      // Pop the result from the stack and return success.
      TYPE_SWITCH(PT, S.Stk.pop<T>(););
      assert(S.Stk.size() == StackSizeBefore);
      S.Stk.push<Integral<32, true>>(Args: Integral<32, true>::from(Value: 1));
    }
  } else {
    // Speculation failed. Outside of a constant context that is an error;
    // within one, __builtin_constant_p simply evaluates to 0.
    if (!S.inConstantContext())
      return Invalid(S, OpPC: RealPC);

    // Discard whatever the failed speculation left on the stack.
    S.Stk.clearTo(NewSize: StackSizeBefore);
    S.Stk.push<Integral<32, true>>(Args: Integral<32, true>::from(Value: 0));
  }

  // RealPC should not have been modified.
  assert(*RealPC == *PCBefore);

  // Jump to end label. This is a little tricker than just RealPC += Offset
  // because our usual jump instructions don't have any arguments, to the offset
  // we get is a little too much and we need to subtract the size of the
  // bool and PrimType arguments again.
  int32_t ParamSize = align(Size: sizeof(PrimType));
  assert(Offset >= ParamSize);
  RealPC += Offset - ParamSize;

  [[maybe_unused]] CodePtr PCCopy = RealPC;
  assert(PCCopy.read<Opcode>() == OP_EndSpeculation);

  return true;
}
123// https://github.com/llvm/llvm-project/issues/102513
124#if defined(_MSC_VER) && !defined(__clang__) && !defined(NDEBUG)
125#pragma optimize("", on)
126#endif
127
128static void diagnoseMissingInitializer(InterpState &S, CodePtr OpPC,
129 const ValueDecl *VD) {
130 const SourceInfo &E = S.Current->getSource(PC: OpPC);
131 S.FFDiag(SI: E, DiagId: diag::note_constexpr_var_init_unknown, ExtraNotes: 1) << VD;
132 S.Note(Loc: VD->getLocation(), DiagId: diag::note_declared_at) << VD->getSourceRange();
133}
134
static void diagnoseNonConstVariable(InterpState &S, CodePtr OpPC,
                                     const ValueDecl *VD);
/// Diagnoses an access to a declaration whose value is unknown to the
/// interpreter, e.g. a function parameter while checking for a potential
/// constant expression. Always returns false. The exact diagnostic chosen
/// depends on the language mode to match the current interpreter's output.
static bool diagnoseUnknownDecl(InterpState &S, CodePtr OpPC,
                                const ValueDecl *D) {
  // This function tries pretty hard to produce a good diagnostic. Just skip
  // that if nobody will see it anyway.
  if (!S.diagnosing())
    return false;

  if (isa<ParmVarDecl>(Val: D)) {
    // Pre-C++11 constant contexts diagnose reference parameters as
    // non-const variables instead.
    if (D->getType()->isReferenceType()) {
      if (S.inConstantContext() && S.getLangOpts().CPlusPlus &&
          !S.getLangOpts().CPlusPlus11) {
        diagnoseNonConstVariable(S, OpPC, VD: D);
        return false;
      }
    }

    const SourceInfo &Loc = S.Current->getSource(PC: OpPC);
    if (S.getLangOpts().CPlusPlus23 && D->getType()->isReferenceType()) {
      S.FFDiag(SI: Loc, DiagId: diag::note_constexpr_access_unknown_variable, ExtraNotes: 1)
          << AK_Read << D;
      S.Note(Loc: D->getLocation(), DiagId: diag::note_declared_at) << D->getSourceRange();
    } else if (S.getLangOpts().CPlusPlus11) {
      S.FFDiag(SI: Loc, DiagId: diag::note_constexpr_function_param_value_unknown, ExtraNotes: 1) << D;
      S.Note(Loc: D->getLocation(), DiagId: diag::note_declared_at) << D->getSourceRange();
    } else {
      // C and C++98: no dedicated note; emit the generic diagnostic.
      S.FFDiag(SI: Loc);
    }
    return false;
  }

  if (!D->getType().isConstQualified()) {
    diagnoseNonConstVariable(S, OpPC, VD: D);
  } else if (const auto *VD = dyn_cast<VarDecl>(Val: D)) {
    if (!VD->getAnyInitializer()) {
      diagnoseMissingInitializer(S, OpPC, VD);
    } else {
      // Const and initialized, but the initializer isn't a constant
      // expression.
      const SourceInfo &Loc = S.Current->getSource(PC: OpPC);
      S.FFDiag(SI: Loc, DiagId: diag::note_constexpr_var_init_non_constant, ExtraNotes: 1) << VD;
      S.Note(Loc: VD->getLocation(), DiagId: diag::note_declared_at);
    }
  }

  return false;
}
181
/// Diagnoses a read of non-constant variable \p VD. The note selected
/// depends on language mode and on the variable's type, to match the
/// diagnostic output of the current (tree-evaluating) interpreter.
static void diagnoseNonConstVariable(InterpState &S, CodePtr OpPC,
                                     const ValueDecl *VD) {
  if (!S.diagnosing())
    return;

  const SourceInfo &Loc = S.Current->getSource(PC: OpPC);
  // C has no dedicated note for this; emit the generic diagnostic.
  if (!S.getLangOpts().CPlusPlus) {
    S.FFDiag(SI: Loc);
    return;
  }

  // A const variable without any initializer gets the "missing
  // initializer" note instead.
  if (const auto *VarD = dyn_cast<VarDecl>(Val: VD);
      VarD && VarD->getType().isConstQualified() &&
      !VarD->getAnyInitializer()) {
    diagnoseMissingInitializer(S, OpPC, VD);
    return;
  }

  // Rather random, but this is to match the diagnostic output of the current
  // interpreter.
  if (isa<ObjCIvarDecl>(Val: VD))
    return;

  if (VD->getType()->isIntegralOrEnumerationType()) {
    S.FFDiag(SI: Loc, DiagId: diag::note_constexpr_ltor_non_const_int, ExtraNotes: 1) << VD;
    S.Note(Loc: VD->getLocation(), DiagId: diag::note_declared_at);
    return;
  }

  // Non-integral type: C++11 phrases this as "not constexpr", older modes
  // as "not integral".
  S.FFDiag(SI: Loc,
           DiagId: S.getLangOpts().CPlusPlus11 ? diag::note_constexpr_ltor_non_constexpr
                                        : diag::note_constexpr_ltor_non_integral,
           ExtraNotes: 1)
      << VD << VD->getType();
  S.Note(Loc: VD->getLocation(), DiagId: diag::note_declared_at);
}
218
219static bool CheckTemporary(InterpState &S, CodePtr OpPC, const Block *B,
220 AccessKinds AK) {
221 if (B->getDeclID()) {
222 if (!(B->isStatic() && B->isTemporary()))
223 return true;
224
225 const auto *MTE = dyn_cast_if_present<MaterializeTemporaryExpr>(
226 Val: B->getDescriptor()->asExpr());
227 if (!MTE)
228 return true;
229
230 // FIXME(perf): Since we do this check on every Load from a static
231 // temporary, it might make sense to cache the value of the
232 // isUsableInConstantExpressions call.
233 if (B->getEvalID() != S.EvalID &&
234 !MTE->isUsableInConstantExpressions(Context: S.getASTContext())) {
235 const SourceInfo &E = S.Current->getSource(PC: OpPC);
236 S.FFDiag(SI: E, DiagId: diag::note_constexpr_access_static_temporary, ExtraNotes: 1) << AK;
237 S.Note(Loc: B->getDescriptor()->getLocation(),
238 DiagId: diag::note_constexpr_temporary_here);
239 return false;
240 }
241 }
242 return true;
243}
244
245static bool CheckGlobal(InterpState &S, CodePtr OpPC, const Pointer &Ptr) {
246 if (auto ID = Ptr.getDeclID()) {
247 if (!Ptr.isStatic())
248 return true;
249
250 if (S.P.getCurrentDecl() == ID)
251 return true;
252
253 S.FFDiag(Loc: S.Current->getLocation(PC: OpPC), DiagId: diag::note_constexpr_modify_global);
254 return false;
255 }
256 return true;
257}
258
259namespace clang {
260namespace interp {
261static void popArg(InterpState &S, const Expr *Arg) {
262 PrimType Ty = S.getContext().classify(E: Arg).value_or(PT: PT_Ptr);
263 TYPE_SWITCH(Ty, S.Stk.discard<T>());
264}
265
/// Removes the arguments of \p Func from the stack after a call has returned.
/// For variadic functions, the number and types of the variadic arguments are
/// recovered from the call expression at the caller's return PC, since the
/// Function itself only knows its written parameters.
void cleanupAfterFunctionCall(InterpState &S, CodePtr OpPC,
                              const Function *Func) {
  assert(S.Current);
  assert(Func);

  if (S.Current->Caller && Func->isVariadic()) {
    // CallExpr we're look for is at the return PC of the current function, i.e.
    // in the caller.
    // This code path should be executed very rarely.
    unsigned NumVarArgs;
    const Expr *const *Args = nullptr;
    unsigned NumArgs = 0;
    const Expr *CallSite = S.Current->Caller->getExpr(PC: S.Current->getRetPC());
    if (const auto *CE = dyn_cast<CallExpr>(Val: CallSite)) {
      Args = CE->getArgs();
      NumArgs = CE->getNumArgs();
    } else if (const auto *CE = dyn_cast<CXXConstructExpr>(Val: CallSite)) {
      Args = CE->getArgs();
      NumArgs = CE->getNumArgs();
    } else
      assert(false && "Can't get arguments from that expression type");

    assert(NumArgs >= Func->getNumWrittenParams());
    // For operator calls, the object argument is written into the call but is
    // not one of the variadic arguments, so subtract it as well.
    NumVarArgs = NumArgs - (Func->getNumWrittenParams() +
                            isa<CXXOperatorCallExpr>(Val: CallSite));
    // Pop the variadic arguments in reverse order of pushing.
    for (unsigned I = 0; I != NumVarArgs; ++I) {
      const Expr *A = Args[NumArgs - 1 - I];
      popArg(S, Arg: A);
    }
  }

  // And in any case, remove the fixed parameters (the non-variadic ones)
  // at the end.
  for (const Function::ParamDescriptor &PDesc : Func->args_reverse())
    TYPE_SWITCH(PDesc.T, S.Stk.discard<T>());
}
302
303bool isConstexprUnknown(const Pointer &P) {
304 if (!P.isBlockPointer())
305 return false;
306
307 if (P.isDummy())
308 return isa_and_nonnull<ParmVarDecl>(Val: P.getDeclDesc()->asValueDecl());
309
310 return P.getDeclDesc()->IsConstexprUnknown;
311}
312
313bool CheckBCPResult(InterpState &S, const Pointer &Ptr) {
314 if (Ptr.isDummy())
315 return false;
316 if (Ptr.isZero())
317 return true;
318 if (Ptr.isFunctionPointer())
319 return false;
320 if (Ptr.isIntegralPointer())
321 return true;
322 if (Ptr.isTypeidPointer())
323 return true;
324
325 if (Ptr.getType()->isAnyComplexType())
326 return true;
327
328 if (const Expr *Base = Ptr.getDeclDesc()->asExpr())
329 return isa<StringLiteral>(Val: Base) && Ptr.getIndex() == 0;
330 return false;
331}
332
/// Checks that the union member \p Ptr points into is the active one.
/// If it isn't, either permits the access (when \p WillActivate allows it and
/// no union on the path has a non-trivial default constructor) or diagnoses
/// a read of an inactive union member, naming the active member if any.
bool CheckActive(InterpState &S, CodePtr OpPC, const Pointer &Ptr,
                 AccessKinds AK, bool WillActivate) {
  if (Ptr.isActive())
    return true;

  assert(Ptr.inUnion());

  // Find the outermost union.
  Pointer U = Ptr.getBase();
  Pointer C = Ptr;
  while (!U.isRoot() && !U.isActive()) {
    // A little arbitrary, but this is what the current interpreter does.
    // See the AnonymousUnion test in test/AST/ByteCode/unions.cpp.
    // GCC's output is more similar to what we would get without
    // this condition.
    if (U.getRecord() && U.getRecord()->isAnonymousUnion())
      break;

    C = U;
    U = U.getBase();
  }
  assert(C.isField());
  assert(C.getBase() == U);

  // Consider:
  // union U {
  //   struct {
  //     int x;
  //     int y;
  //   } a;
  // }
  //
  // When activating x, we will also activate a. If we now try to read
  // from y, we will get to CheckActive, because y is not active. In that
  // case, our U will be a (not a union). We return here and let later code
  // handle this.
  if (!U.getFieldDesc()->isUnion())
    return true;

  // When we will activate Ptr, check that none of the unions in its path have a
  // non-trivial default constructor.
  if (WillActivate) {
    bool Fails = false;
    Pointer It = Ptr;
    while (!It.isRoot() && !It.isActive()) {
      if (const Record *R = It.getRecord(); R && R->isUnion()) {
        if (const auto *CXXRD = dyn_cast<CXXRecordDecl>(Val: R->getDecl());
            CXXRD && !CXXRD->hasTrivialDefaultConstructor()) {
          Fails = true;
          break;
        }
      }
      It = It.getBase();
    }
    // All unions on the path are trivially default-constructible, so the
    // activation is fine.
    if (!Fails)
      return true;
  }

  // Get the inactive field descriptor.
  assert(!C.isActive());
  const FieldDecl *InactiveField = C.getField();
  assert(InactiveField);

  // Find the active field of the union.
  const Record *R = U.getRecord();
  assert(R && R->isUnion() && "Not a union");

  const FieldDecl *ActiveField = nullptr;
  for (const Record::Field &F : R->fields()) {
    const Pointer &Field = U.atField(Off: F.Offset);
    if (Field.isActive()) {
      ActiveField = Field.getField();
      break;
    }
  }

  const SourceInfo &Loc = S.Current->getSource(PC: OpPC);
  S.FFDiag(SI: Loc, DiagId: diag::note_constexpr_access_inactive_union_member)
      << AK << InactiveField << !ActiveField << ActiveField;
  return false;
}
414
415bool CheckExtern(InterpState &S, CodePtr OpPC, const Pointer &Ptr) {
416 if (!Ptr.isExtern())
417 return true;
418
419 if (!Ptr.isPastEnd() &&
420 (Ptr.isInitialized() ||
421 (Ptr.getDeclDesc()->asVarDecl() == S.EvaluatingDecl)))
422 return true;
423
424 if (S.checkingPotentialConstantExpression() && S.getLangOpts().CPlusPlus &&
425 Ptr.isConst())
426 return false;
427
428 const auto *VD = Ptr.getDeclDesc()->asValueDecl();
429 diagnoseNonConstVariable(S, OpPC, VD);
430 return false;
431}
432
433bool CheckArray(InterpState &S, CodePtr OpPC, const Pointer &Ptr) {
434 if (!Ptr.isUnknownSizeArray())
435 return true;
436 const SourceInfo &E = S.Current->getSource(PC: OpPC);
437 S.FFDiag(SI: E, DiagId: diag::note_constexpr_unsized_array_indexed);
438 return false;
439}
440
/// Checks that the object at \p Ptr may be accessed at all for access kind
/// \p AK: it must be non-null and its lifetime must not have ended
/// (e.g. via delete or scope exit).
bool CheckLive(InterpState &S, CodePtr OpPC, const Pointer &Ptr,
               AccessKinds AK) {
  if (Ptr.isZero()) {
    const auto &Src = S.Current->getSource(PC: OpPC);

    // Distinguish a null member access from a plain null dereference.
    if (Ptr.isField())
      S.FFDiag(SI: Src, DiagId: diag::note_constexpr_null_subobject) << CSK_Field;
    else
      S.FFDiag(SI: Src, DiagId: diag::note_constexpr_access_null) << AK;

    return false;
  }

  if (!Ptr.isLive()) {
    const auto &Src = S.Current->getSource(PC: OpPC);

    if (Ptr.isDynamic()) {
      // Heap allocation that has already been deleted.
      S.FFDiag(SI: Src, DiagId: diag::note_constexpr_access_deleted_object) << AK;
    } else if (!S.checkingPotentialConstantExpression()) {
      bool IsTemp = Ptr.isTemporary();
      S.FFDiag(SI: Src, DiagId: diag::note_constexpr_access_uninit)
          << AK << /*uninitialized=*/false << S.Current->getRange(PC: OpPC);

      if (IsTemp)
        S.Note(Loc: Ptr.getDeclLoc(), DiagId: diag::note_constexpr_temporary_here);
      else
        S.Note(Loc: Ptr.getDeclLoc(), DiagId: diag::note_declared_at);
    }

    return false;
  }

  return true;
}
475
/// Checks if the variable described by \p Desc may be read in a constant
/// expression. Constexpr variables and the declaration currently being
/// evaluated are always readable; otherwise readability depends on the
/// variable's type, constness and the language mode.
bool CheckConstant(InterpState &S, CodePtr OpPC, const Descriptor *Desc) {
  assert(Desc);

  const auto *D = Desc->asVarDecl();
  if (!D || D == S.EvaluatingDecl || D->isConstexpr())
    return true;

  // If we're evaluating the initializer for a constexpr variable in C23, we may
  // only read other contexpr variables. Abort here since this one isn't
  // constexpr.
  if (const auto *VD = dyn_cast_if_present<VarDecl>(Val: S.EvaluatingDecl);
      VD && VD->isConstexpr() && S.getLangOpts().C23)
    return Invalid(S, OpPC);

  QualType T = D->getType();
  bool IsConstant = T.isConstant(Ctx: S.getASTContext());
  if (T->isIntegralOrEnumerationType()) {
    // Constant integral/enum variables are fine; anything else is a
    // non-const read.
    if (!IsConstant) {
      diagnoseNonConstVariable(S, OpPC, VD: D);
      return false;
    }
    return true;
  }

  if (IsConstant) {
    // Const but not constexpr: allowed, but only as a "core constant
    // expression" diagnostic (CCEDiag), matching language mode wording.
    if (S.getLangOpts().CPlusPlus) {
      S.CCEDiag(Loc: S.Current->getLocation(PC: OpPC),
                DiagId: S.getLangOpts().CPlusPlus11
                    ? diag::note_constexpr_ltor_non_constexpr
                    : diag::note_constexpr_ltor_non_integral,
                ExtraNotes: 1)
          << D << T;
      S.Note(Loc: D->getLocation(), DiagId: diag::note_declared_at);
    } else {
      S.CCEDiag(Loc: S.Current->getLocation(PC: OpPC));
    }
    return true;
  }

  if (T->isPointerOrReferenceType()) {
    // Non-const pointers/references to const pointees are OK in C++11+.
    if (!T->getPointeeType().isConstant(Ctx: S.getASTContext()) ||
        !S.getLangOpts().CPlusPlus11) {
      diagnoseNonConstVariable(S, OpPC, VD: D);
      return false;
    }
    return true;
  }

  diagnoseNonConstVariable(S, OpPC, VD: D);
  return false;
}
527
528static bool CheckConstant(InterpState &S, CodePtr OpPC, const Pointer &Ptr) {
529 if (!Ptr.isStatic() || !Ptr.isBlockPointer())
530 return true;
531 if (!Ptr.getDeclID())
532 return true;
533 return CheckConstant(S, OpPC, Desc: Ptr.getDeclDesc());
534}
535
536bool CheckNull(InterpState &S, CodePtr OpPC, const Pointer &Ptr,
537 CheckSubobjectKind CSK) {
538 if (!Ptr.isZero())
539 return true;
540 const SourceInfo &Loc = S.Current->getSource(PC: OpPC);
541 S.FFDiag(SI: Loc, DiagId: diag::note_constexpr_null_subobject)
542 << CSK << S.Current->getRange(PC: OpPC);
543
544 return false;
545}
546
547bool CheckRange(InterpState &S, CodePtr OpPC, const Pointer &Ptr,
548 AccessKinds AK) {
549 if (!Ptr.isOnePastEnd() && !Ptr.isZeroSizeArray())
550 return true;
551 if (S.getLangOpts().CPlusPlus) {
552 const SourceInfo &Loc = S.Current->getSource(PC: OpPC);
553 S.FFDiag(SI: Loc, DiagId: diag::note_constexpr_access_past_end)
554 << AK << S.Current->getRange(PC: OpPC);
555 }
556 return false;
557}
558
559bool CheckRange(InterpState &S, CodePtr OpPC, const Pointer &Ptr,
560 CheckSubobjectKind CSK) {
561 if (!Ptr.isElementPastEnd() && !Ptr.isZeroSizeArray())
562 return true;
563 const SourceInfo &Loc = S.Current->getSource(PC: OpPC);
564 S.FFDiag(SI: Loc, DiagId: diag::note_constexpr_past_end_subobject)
565 << CSK << S.Current->getRange(PC: OpPC);
566 return false;
567}
568
569bool CheckSubobject(InterpState &S, CodePtr OpPC, const Pointer &Ptr,
570 CheckSubobjectKind CSK) {
571 if (!Ptr.isOnePastEnd())
572 return true;
573
574 const SourceInfo &Loc = S.Current->getSource(PC: OpPC);
575 S.FFDiag(SI: Loc, DiagId: diag::note_constexpr_past_end_subobject)
576 << CSK << S.Current->getRange(PC: OpPC);
577 return false;
578}
579
580bool CheckDowncast(InterpState &S, CodePtr OpPC, const Pointer &Ptr,
581 uint32_t Offset) {
582 uint32_t MinOffset = Ptr.getDeclDesc()->getMetadataSize();
583 uint32_t PtrOffset = Ptr.getByteOffset();
584
585 // We subtract Offset from PtrOffset. The result must be at least
586 // MinOffset.
587 if (Offset < PtrOffset && (PtrOffset - Offset) >= MinOffset)
588 return true;
589
590 const auto *E = cast<CastExpr>(Val: S.Current->getExpr(PC: OpPC));
591 QualType TargetQT = E->getType()->getPointeeType();
592 QualType MostDerivedQT = Ptr.getDeclPtr().getType();
593
594 S.CCEDiag(E, DiagId: diag::note_constexpr_invalid_downcast)
595 << MostDerivedQT << TargetQT;
596
597 return false;
598}
599
600bool CheckConst(InterpState &S, CodePtr OpPC, const Pointer &Ptr) {
601 assert(Ptr.isLive() && "Pointer is not live");
602 if (!Ptr.isConst())
603 return true;
604
605 if (Ptr.isMutable() && !Ptr.isConstInMutable())
606 return true;
607
608 if (!Ptr.isBlockPointer())
609 return false;
610
611 // The This pointer is writable in constructors and destructors,
612 // even if isConst() returns true.
613 if (llvm::is_contained(Range&: S.InitializingBlocks, Element: Ptr.block()))
614 return true;
615
616 const QualType Ty = Ptr.getType();
617 const SourceInfo &Loc = S.Current->getSource(PC: OpPC);
618 S.FFDiag(SI: Loc, DiagId: diag::note_constexpr_modify_const_type) << Ty;
619 return false;
620}
621
622bool CheckMutable(InterpState &S, CodePtr OpPC, const Pointer &Ptr) {
623 assert(Ptr.isLive() && "Pointer is not live");
624 if (!Ptr.isMutable())
625 return true;
626
627 // In C++14 onwards, it is permitted to read a mutable member whose
628 // lifetime began within the evaluation.
629 if (S.getLangOpts().CPlusPlus14 && Ptr.block()->getEvalID() == S.EvalID)
630 return true;
631
632 const SourceInfo &Loc = S.Current->getSource(PC: OpPC);
633 const FieldDecl *Field = Ptr.getField();
634 S.FFDiag(SI: Loc, DiagId: diag::note_constexpr_access_mutable, ExtraNotes: 1) << AK_Read << Field;
635 S.Note(Loc: Field->getLocation(), DiagId: diag::note_declared_at);
636 return false;
637}
638
/// Checks that a volatile object may be accessed with access kind \p AK.
/// Volatile accesses are never constant expressions in C++ (except while the
/// object is being constructed); the diagnostic points at whichever
/// enclosing field/variable/expression introduced the volatility.
static bool CheckVolatile(InterpState &S, CodePtr OpPC, const Pointer &Ptr,
                          AccessKinds AK) {
  assert(Ptr.isLive());

  if (!Ptr.isVolatile())
    return true;

  // C gets a generic "invalid" diagnostic instead of the dedicated note.
  if (!S.getLangOpts().CPlusPlus)
    return Invalid(S, OpPC);

  // Volatile object can be written-to and read if they are being constructed.
  if (llvm::is_contained(Range&: S.InitializingBlocks, Element: Ptr.block()))
    return true;

  // The reason why Ptr is volatile might be further up the hierarchy.
  // Find that pointer.
  Pointer P = Ptr;
  while (!P.isRoot()) {
    if (P.getType().isVolatileQualified())
      break;
    P = P.getBase();
  }

  // DiagKind selects the phrasing: 2 = member field, 1 = variable,
  // 0 = expression (e.g. a temporary).
  const NamedDecl *ND = nullptr;
  int DiagKind;
  SourceLocation Loc;
  if (const auto *F = P.getField()) {
    DiagKind = 2;
    Loc = F->getLocation();
    ND = F;
  } else if (auto *VD = P.getFieldDesc()->asValueDecl()) {
    DiagKind = 1;
    Loc = VD->getLocation();
    ND = VD;
  } else {
    DiagKind = 0;
    if (const auto *E = P.getFieldDesc()->asExpr())
      Loc = E->getExprLoc();
  }

  S.FFDiag(Loc: S.Current->getLocation(PC: OpPC),
           DiagId: diag::note_constexpr_access_volatile_obj, ExtraNotes: 1)
      << AK << DiagKind << ND;
  S.Note(Loc, DiagId: diag::note_constexpr_volatile_here) << DiagKind;
  return false;
}
685
686bool DiagnoseUninitialized(InterpState &S, CodePtr OpPC, const Pointer &Ptr,
687 AccessKinds AK) {
688 assert(Ptr.isLive());
689 assert(!Ptr.isInitialized());
690 return DiagnoseUninitialized(S, OpPC, Extern: Ptr.isExtern(), Desc: Ptr.getDeclDesc(), AK);
691}
692
/// Diagnoses an access (of kind \p AK) to an uninitialized object described
/// by \p Desc. Always returns false. Constexpr/global variables get more
/// specific notes than locals.
bool DiagnoseUninitialized(InterpState &S, CodePtr OpPC, bool Extern,
                           const Descriptor *Desc, AccessKinds AK) {
  // Uninitialized externs are expected while only checking for a potential
  // constant expression; stay silent.
  if (Extern && S.checkingPotentialConstantExpression())
    return false;

  if (const auto *VD = Desc->asVarDecl();
      VD && (VD->isConstexpr() || VD->hasGlobalStorage())) {

    // Reading the variable whose initializer we are currently evaluating.
    if (VD == S.EvaluatingDecl &&
        !(S.getLangOpts().CPlusPlus23 && VD->getType()->isReferenceType())) {
      if (!S.getLangOpts().CPlusPlus14 &&
          !VD->getType().isConstant(Ctx: S.getASTContext())) {
        // Diagnose as non-const read.
        diagnoseNonConstVariable(S, OpPC, VD);
      } else {
        const SourceInfo &Loc = S.Current->getSource(PC: OpPC);
        // Diagnose as "read of object outside its lifetime".
        S.FFDiag(SI: Loc, DiagId: diag::note_constexpr_access_uninit)
            << AK << /*IsIndeterminate=*/false;
      }
      return false;
    }

    if (VD->getAnyInitializer()) {
      // Initialized, but not with a constant expression.
      const SourceInfo &Loc = S.Current->getSource(PC: OpPC);
      S.FFDiag(SI: Loc, DiagId: diag::note_constexpr_var_init_non_constant, ExtraNotes: 1) << VD;
      S.Note(Loc: VD->getLocation(), DiagId: diag::note_declared_at);
    } else {
      diagnoseMissingInitializer(S, OpPC, VD);
    }
    return false;
  }

  // Generic "read of uninitialized object" for everything else.
  if (!S.checkingPotentialConstantExpression()) {
    S.FFDiag(SI: S.Current->getSource(PC: OpPC), DiagId: diag::note_constexpr_access_uninit)
        << AK << /*uninitialized=*/true << S.Current->getRange(PC: OpPC);
  }
  return false;
}
732
733static bool CheckLifetime(InterpState &S, CodePtr OpPC, Lifetime LT,
734 AccessKinds AK) {
735 if (LT == Lifetime::Started)
736 return true;
737
738 if (!S.checkingPotentialConstantExpression()) {
739 S.FFDiag(SI: S.Current->getSource(PC: OpPC), DiagId: diag::note_constexpr_access_uninit)
740 << AK << /*uninitialized=*/false << S.Current->getRange(PC: OpPC);
741 }
742 return false;
743}
744
745static bool CheckWeak(InterpState &S, CodePtr OpPC, const Block *B) {
746 if (!B->isWeak())
747 return true;
748
749 const auto *VD = B->getDescriptor()->asVarDecl();
750 assert(VD);
751 S.FFDiag(Loc: S.Current->getLocation(PC: OpPC), DiagId: diag::note_constexpr_var_init_weak)
752 << VD;
753 S.Note(Loc: VD->getLocation(), DiagId: diag::note_declared_at);
754
755 return false;
756}
757
// The list of checks here is just the one from CheckLoad, but with the
// ones removed that are impossible on primitive global values.
// For example, since those can't be members of structs, they also can't
// be mutable.
bool CheckGlobalLoad(InterpState &S, CodePtr OpPC, const Block *B) {
  const auto &Desc = B->getBlockDesc<GlobalInlineDescriptor>();
  // Inaccessible blocks (extern/dummy/weak) get the cheap checks only.
  if (!B->isAccessible()) {
    if (!CheckExtern(S, OpPC, Ptr: Pointer(const_cast<Block *>(B))))
      return false;
    if (!CheckDummy(S, OpPC, B, AK: AK_Read))
      return false;
    return CheckWeak(S, OpPC, B);
  }

  if (!CheckConstant(S, OpPC, Desc: B->getDescriptor()))
    return false;
  if (Desc.InitState != GlobalInitState::Initialized)
    return DiagnoseUninitialized(S, OpPC, Extern: B->isExtern(), Desc: B->getDescriptor(),
                                 AK: AK_Read);
  if (!CheckTemporary(S, OpPC, B, AK: AK_Read))
    return false;
  // Volatile globals: diagnose inline (C++) or as generically invalid (C).
  if (B->getDescriptor()->IsVolatile) {
    if (!S.getLangOpts().CPlusPlus)
      return Invalid(S, OpPC);

    const ValueDecl *D = B->getDescriptor()->asValueDecl();
    S.FFDiag(Loc: S.Current->getLocation(PC: OpPC),
             DiagId: diag::note_constexpr_access_volatile_obj, ExtraNotes: 1)
        << AK_Read << 1 << D;
    S.Note(Loc: D->getLocation(), DiagId: diag::note_constexpr_volatile_here) << 1;
    return false;
  }
  return true;
}
792
// Similarly, for local loads.
// Reduced version of the CheckLoad check list, covering only what can
// apply to a primitive local variable.
bool CheckLocalLoad(InterpState &S, CodePtr OpPC, const Block *B) {
  assert(!B->isExtern());
  const auto &Desc = *reinterpret_cast<const InlineDescriptor *>(B->rawData());
  if (!CheckLifetime(S, OpPC, LT: Desc.LifeState, AK: AK_Read))
    return false;
  if (!Desc.IsInitialized)
    return DiagnoseUninitialized(S, OpPC, /*Extern=*/false, Desc: B->getDescriptor(),
                                 AK: AK_Read);
  // Volatile locals: diagnose inline (C++) or as generically invalid (C).
  if (B->getDescriptor()->IsVolatile) {
    if (!S.getLangOpts().CPlusPlus)
      return Invalid(S, OpPC);

    const ValueDecl *D = B->getDescriptor()->asValueDecl();
    S.FFDiag(Loc: S.Current->getLocation(PC: OpPC),
             DiagId: diag::note_constexpr_access_volatile_obj, ExtraNotes: 1)
        << AK_Read << 1 << D;
    S.Note(Loc: D->getLocation(), DiagId: diag::note_constexpr_volatile_here) << 1;
    return false;
  }
  return true;
}
815
/// Master check for reading through \p Ptr with access kind \p AK. Runs the
/// full sequence of load checks; the order of the checks determines which
/// diagnostic wins and is part of the expected output.
bool CheckLoad(InterpState &S, CodePtr OpPC, const Pointer &Ptr,
               AccessKinds AK) {
  if (Ptr.isZero()) {
    const auto &Src = S.Current->getSource(PC: OpPC);

    if (Ptr.isField())
      S.FFDiag(SI: Src, DiagId: diag::note_constexpr_null_subobject) << CSK_Field;
    else
      S.FFDiag(SI: Src, DiagId: diag::note_constexpr_access_null) << AK;
    return false;
  }
  // Block pointers are the only ones we can actually read from.
  if (!Ptr.isBlockPointer())
    return false;

  // Inaccessible blocks (dead/extern/dummy/weak) get the cheap checks only.
  if (!Ptr.block()->isAccessible()) {
    if (!CheckLive(S, OpPC, Ptr, AK))
      return false;
    if (!CheckExtern(S, OpPC, Ptr))
      return false;
    if (!CheckDummy(S, OpPC, B: Ptr.block(), AK))
      return false;
    return CheckWeak(S, OpPC, B: Ptr.block());
  }

  if (!CheckConstant(S, OpPC, Ptr))
    return false;
  if (!CheckRange(S, OpPC, Ptr, AK))
    return false;
  if (!CheckActive(S, OpPC, Ptr, AK))
    return false;
  if (!CheckLifetime(S, OpPC, LT: Ptr.getLifetime(), AK))
    return false;
  if (!Ptr.isInitialized())
    return DiagnoseUninitialized(S, OpPC, Ptr, AK);
  if (!CheckTemporary(S, OpPC, B: Ptr.block(), AK))
    return false;

  if (!CheckMutable(S, OpPC, Ptr))
    return false;
  if (!CheckVolatile(S, OpPC, Ptr, AK))
    return false;
  // Reads of constexpr-unknown values fail silently outside a constant
  // context.
  if (!Ptr.isConst() && !S.inConstantContext() && isConstexprUnknown(P: Ptr))
    return false;
  return true;
}
862
/// This is not used by any of the opcodes directly. It's used by
/// EvalEmitter to do the final lvalue-to-rvalue conversion.
/// Like CheckLoad, but without the range/mutable-volatile/constexpr-unknown
/// checks that don't apply to a final result read.
bool CheckFinalLoad(InterpState &S, CodePtr OpPC, const Pointer &Ptr) {
  assert(!Ptr.isZero());
  if (!Ptr.isBlockPointer())
    return false;

  // Inaccessible blocks (dead/extern/dummy/weak) get the cheap checks only.
  if (!Ptr.block()->isAccessible()) {
    if (!CheckLive(S, OpPC, Ptr, AK: AK_Read))
      return false;
    if (!CheckExtern(S, OpPC, Ptr))
      return false;
    if (!CheckDummy(S, OpPC, B: Ptr.block(), AK: AK_Read))
      return false;
    return CheckWeak(S, OpPC, B: Ptr.block());
  }

  if (!CheckConstant(S, OpPC, Ptr))
    return false;

  if (!CheckActive(S, OpPC, Ptr, AK: AK_Read))
    return false;
  if (!CheckLifetime(S, OpPC, LT: Ptr.getLifetime(), AK: AK_Read))
    return false;
  if (!Ptr.isInitialized())
    return DiagnoseUninitialized(S, OpPC, Ptr, AK: AK_Read);
  if (!CheckTemporary(S, OpPC, B: Ptr.block(), AK: AK_Read))
    return false;
  if (!CheckMutable(S, OpPC, Ptr))
    return false;
  return true;
}
895
/// Master check for writing through \p Ptr. \p WillBeActivated is forwarded
/// to CheckActive so a store that activates a union member is permitted.
/// The order of the checks determines which diagnostic wins.
bool CheckStore(InterpState &S, CodePtr OpPC, const Pointer &Ptr,
                bool WillBeActivated) {
  if (!Ptr.isBlockPointer() || Ptr.isZero())
    return false;

  // Inaccessible blocks (dead/extern/dummy) get the cheap checks only.
  if (!Ptr.block()->isAccessible()) {
    if (!CheckLive(S, OpPC, Ptr, AK: AK_Assign))
      return false;
    if (!CheckExtern(S, OpPC, Ptr))
      return false;
    return CheckDummy(S, OpPC, B: Ptr.block(), AK: AK_Assign);
  }
  if (!CheckLifetime(S, OpPC, LT: Ptr.getLifetime(), AK: AK_Assign))
    return false;
  if (!CheckRange(S, OpPC, Ptr, AK: AK_Assign))
    return false;
  if (!CheckActive(S, OpPC, Ptr, AK: AK_Assign, WillActivate: WillBeActivated))
    return false;
  if (!CheckGlobal(S, OpPC, Ptr))
    return false;
  if (!CheckConst(S, OpPC, Ptr))
    return false;
  if (!CheckVolatile(S, OpPC, Ptr, AK: AK_Assign))
    return false;
  // Writes to constexpr-unknown values fail silently outside a constant
  // context.
  if (!S.inConstantContext() && isConstexprUnknown(P: Ptr))
    return false;
  return true;
}
924
925static bool CheckInvoke(InterpState &S, CodePtr OpPC, const Pointer &Ptr) {
926 if (!CheckLive(S, OpPC, Ptr, AK: AK_MemberCall))
927 return false;
928 if (!Ptr.isDummy()) {
929 if (!CheckExtern(S, OpPC, Ptr))
930 return false;
931 if (!CheckRange(S, OpPC, Ptr, AK: AK_MemberCall))
932 return false;
933 }
934 return true;
935}
936
937bool CheckInit(InterpState &S, CodePtr OpPC, const Pointer &Ptr) {
938 if (!CheckLive(S, OpPC, Ptr, AK: AK_Assign))
939 return false;
940 if (!CheckRange(S, OpPC, Ptr, AK: AK_Assign))
941 return false;
942 return true;
943}
944
/// Diagnoses a call to \p DiagDecl, a function that cannot be called in a
/// constant expression. Picks the most specific note available (failed
/// assert macro, inherited non-constexpr constructor, plain non-constexpr
/// function). Always returns false.
static bool diagnoseCallableDecl(InterpState &S, CodePtr OpPC,
                                 const FunctionDecl *DiagDecl) {
  // Bail out if the function declaration itself is invalid. We will
  // have produced a relevant diagnostic while parsing it, so just
  // note the problematic sub-expression.
  if (DiagDecl->isInvalidDecl())
    return Invalid(S, OpPC);

  // Diagnose failed assertions specially.
  if (S.Current->getLocation(PC: OpPC).isMacroID() && DiagDecl->getIdentifier()) {
    // FIXME: Instead of checking for an implementation-defined function,
    // check and evaluate the assert() macro.
    StringRef Name = DiagDecl->getName();
    bool AssertFailed =
        Name == "__assert_rtn" || Name == "__assert_fail" || Name == "_wassert";
    if (AssertFailed) {
      S.FFDiag(Loc: S.Current->getLocation(PC: OpPC),
               DiagId: diag::note_constexpr_assert_failed);
      return false;
    }
  }

  // Pre-C++11 gets only the generic "invalid subexpression" note.
  if (!S.getLangOpts().CPlusPlus11) {
    S.FFDiag(Loc: S.Current->getLocation(PC: OpPC),
             DiagId: diag::note_invalid_subexpr_in_const_expr);
    return false;
  }

  // Invalid decls have been diagnosed before.
  // NOTE(review): unreachable in practice — the identical check at the top
  // already returned; kept to match upstream.
  if (DiagDecl->isInvalidDecl())
    return false;

  // If this function is not constexpr because it is an inherited
  // non-constexpr constructor, diagnose that directly.
  const auto *CD = dyn_cast<CXXConstructorDecl>(Val: DiagDecl);
  if (CD && CD->isInheritingConstructor()) {
    const auto *Inherited = CD->getInheritedConstructor().getConstructor();
    if (!Inherited->isConstexpr())
      DiagDecl = CD = Inherited;
  }

  // Silently reject constructors of invalid classes. The invalid class
  // has been rejected elsewhere before.
  if (CD && CD->getParent()->isInvalidDecl())
    return false;

  // FIXME: If DiagDecl is an implicitly-declared special member function
  // or an inheriting constructor, we should be much more explicit about why
  // it's not constexpr.
  if (CD && CD->isInheritingConstructor()) {
    S.FFDiag(Loc: S.Current->getLocation(PC: OpPC), DiagId: diag::note_constexpr_invalid_inhctor,
             ExtraNotes: 1)
        << CD->getInheritedConstructor().getConstructor()->getParent();
    S.Note(Loc: DiagDecl->getLocation(), DiagId: diag::note_declared_at);
  } else {
    // Don't emit anything if the function isn't defined and we're checking
    // for a constant expression. It might be defined at the point we're
    // actually calling it.
    bool IsExtern = DiagDecl->getStorageClass() == SC_Extern;
    bool IsDefined = DiagDecl->isDefined();
    if (!IsDefined && !IsExtern && DiagDecl->isConstexpr() &&
        S.checkingPotentialConstantExpression())
      return false;

    // If the declaration is defined, declared 'constexpr' _and_ has a body,
    // the below diagnostic doesn't add anything useful.
    if (DiagDecl->isDefined() && DiagDecl->isConstexpr() && DiagDecl->hasBody())
      return false;

    S.FFDiag(Loc: S.Current->getLocation(PC: OpPC),
             DiagId: diag::note_constexpr_invalid_function, ExtraNotes: 1)
        << DiagDecl->isConstexpr() << (bool)CD << DiagDecl;

    // Point at the definition when there is one; otherwise at the
    // declaration.
    if (DiagDecl->getDefinition())
      S.Note(Loc: DiagDecl->getDefinition()->getLocation(), DiagId: diag::note_declared_at);
    else
      S.Note(Loc: DiagDecl->getLocation(), DiagId: diag::note_declared_at);
  }

  return false;
}
1026
/// Check whether the bytecode function \p F may be called in the
/// current evaluation mode; on failure, emits a diagnostic through
/// diagnoseCallableDecl() and returns false.
static bool CheckCallable(InterpState &S, CodePtr OpPC, const Function *F) {
  // Pre-C++20, virtual calls are not allowed in constant expressions.
  if (F->isVirtual() && !S.getLangOpts().CPlusPlus20) {
    const SourceLocation &Loc = S.Current->getLocation(PC: OpPC);
    S.CCEDiag(Loc, DiagId: diag::note_constexpr_virtual_call);
    return false;
  }

  // When only checking for a *potential* constant expression, do not
  // descend into nested calls.
  if (S.checkingPotentialConstantExpression() && S.Current->getDepth() != 0)
    return false;

  // A valid function with a body is callable if it is constexpr, or if it
  // carries the MSVC [[msvc::constexpr]] attribute and that extension is
  // currently allowed.
  if (F->isValid() && F->hasBody() &&
      (F->isConstexpr() || (S.Current->MSVCConstexprAllowed &&
                            F->getDecl()->hasAttr<MSConstexprAttr>())))
    return true;

  const FunctionDecl *DiagDecl = F->getDecl();
  const FunctionDecl *Definition = nullptr;
  DiagDecl->getBody(Definition);

  // A constexpr function with no definition yet might still become defined
  // before the actual call, so fail silently in potential-constant mode.
  if (!Definition && S.checkingPotentialConstantExpression() &&
      DiagDecl->isConstexpr()) {
    return false;
  }

  // Implicitly constexpr.
  if (F->isLambdaStaticInvoker())
    return true;

  return diagnoseCallableDecl(S, OpPC, DiagDecl);
}
1057
1058static bool CheckCallDepth(InterpState &S, CodePtr OpPC) {
1059 if ((S.Current->getDepth() + 1) > S.getLangOpts().ConstexprCallDepth) {
1060 S.FFDiag(SI: S.Current->getSource(PC: OpPC),
1061 DiagId: diag::note_constexpr_depth_limit_exceeded)
1062 << S.getLangOpts().ConstexprCallDepth;
1063 return false;
1064 }
1065
1066 return true;
1067}
1068
1069bool CheckThis(InterpState &S, CodePtr OpPC) {
1070 if (S.Current->hasThisPointer())
1071 return true;
1072
1073 const Expr *E = S.Current->getExpr(PC: OpPC);
1074 if (S.getLangOpts().CPlusPlus11) {
1075 bool IsImplicit = false;
1076 if (const auto *TE = dyn_cast<CXXThisExpr>(Val: E))
1077 IsImplicit = TE->isImplicit();
1078 S.FFDiag(E, DiagId: diag::note_constexpr_this) << IsImplicit;
1079 } else {
1080 S.FFDiag(E);
1081 }
1082
1083 return false;
1084}
1085
/// Check the result of a floating-point operation against the active
/// FP options. \p Status is the APFloat operation status, \p FPO the
/// floating-point options in effect at the operation.
bool CheckFloatResult(InterpState &S, CodePtr OpPC, const Floating &Result,
                      APFloat::opStatus Status, FPOptions FPO) {
  // [expr.pre]p4:
  //   If during the evaluation of an expression, the result is not
  //   mathematically defined [...], the behavior is undefined.
  // FIXME: C++ rules require us to not conform to IEEE 754 here.
  if (Result.isNan()) {
    const SourceInfo &E = S.Current->getSource(PC: OpPC);
    S.CCEDiag(SI: E, DiagId: diag::note_constexpr_float_arithmetic)
        << /*NaN=*/true << S.Current->getRange(PC: OpPC);
    return S.noteUndefinedBehavior();
  }

  // In a constant context, assume that any dynamic rounding mode or FP
  // exception state matches the default floating-point environment.
  if (S.inConstantContext())
    return true;

  if ((Status & APFloat::opInexact) &&
      FPO.getRoundingMode() == llvm::RoundingMode::Dynamic) {
    // Inexact result means that it depends on rounding mode. If the requested
    // mode is dynamic, the evaluation cannot be made in compile time.
    const SourceInfo &E = S.Current->getSource(PC: OpPC);
    S.FFDiag(SI: E, DiagId: diag::note_constexpr_dynamic_rounding);
    return false;
  }

  // Any non-OK status is disallowed under strict FP semantics (dynamic
  // rounding, observed exceptions, or FP environment access).
  if ((Status != APFloat::opOK) &&
      (FPO.getRoundingMode() == llvm::RoundingMode::Dynamic ||
       FPO.getExceptionMode() != LangOptions::FPE_Ignore ||
       FPO.getAllowFEnvAccess())) {
    const SourceInfo &E = S.Current->getSource(PC: OpPC);
    S.FFDiag(SI: E, DiagId: diag::note_constexpr_float_arithmetic_strict);
    return false;
  }

  if ((Status & APFloat::opStatus::opInvalidOp) &&
      FPO.getExceptionMode() != LangOptions::FPE_Ignore) {
    const SourceInfo &E = S.Current->getSource(PC: OpPC);
    // There is no usefully definable result.
    S.FFDiag(SI: E);
    return false;
  }

  return true;
}
1132
1133bool CheckDynamicMemoryAllocation(InterpState &S, CodePtr OpPC) {
1134 if (S.getLangOpts().CPlusPlus20)
1135 return true;
1136
1137 const SourceInfo &E = S.Current->getSource(PC: OpPC);
1138 S.CCEDiag(SI: E, DiagId: diag::note_constexpr_new);
1139 return true;
1140}
1141
1142bool CheckNewDeleteForms(InterpState &S, CodePtr OpPC,
1143 DynamicAllocator::Form AllocForm,
1144 DynamicAllocator::Form DeleteForm, const Descriptor *D,
1145 const Expr *NewExpr) {
1146 if (AllocForm == DeleteForm)
1147 return true;
1148
1149 QualType TypeToDiagnose = D->getDataType(Ctx: S.getASTContext());
1150
1151 const SourceInfo &E = S.Current->getSource(PC: OpPC);
1152 S.FFDiag(SI: E, DiagId: diag::note_constexpr_new_delete_mismatch)
1153 << static_cast<int>(DeleteForm) << static_cast<int>(AllocForm)
1154 << TypeToDiagnose;
1155 S.Note(Loc: NewExpr->getExprLoc(), DiagId: diag::note_constexpr_dynamic_alloc_here)
1156 << NewExpr->getSourceRange();
1157 return false;
1158}
1159
1160bool CheckDeleteSource(InterpState &S, CodePtr OpPC, const Expr *Source,
1161 const Pointer &Ptr) {
1162 // Regular new type(...) call.
1163 if (isa_and_nonnull<CXXNewExpr>(Val: Source))
1164 return true;
1165 // operator new.
1166 if (const auto *CE = dyn_cast_if_present<CallExpr>(Val: Source);
1167 CE && CE->getBuiltinCallee() == Builtin::BI__builtin_operator_new)
1168 return true;
1169 // std::allocator.allocate() call
1170 if (const auto *MCE = dyn_cast_if_present<CXXMemberCallExpr>(Val: Source);
1171 MCE && MCE->getMethodDecl()->getIdentifier()->isStr(Str: "allocate"))
1172 return true;
1173
1174 // Whatever this is, we didn't heap allocate it.
1175 const SourceInfo &Loc = S.Current->getSource(PC: OpPC);
1176 S.FFDiag(SI: Loc, DiagId: diag::note_constexpr_delete_not_heap_alloc)
1177 << Ptr.toDiagnosticString(Ctx: S.getASTContext());
1178
1179 if (Ptr.isTemporary())
1180 S.Note(Loc: Ptr.getDeclLoc(), DiagId: diag::note_constexpr_temporary_here);
1181 else
1182 S.Note(Loc: Ptr.getDeclLoc(), DiagId: diag::note_declared_at);
1183 return false;
1184}
1185
/// We already know the given DeclRefExpr is invalid for some reason,
/// now figure out why and print appropriate diagnostics.
bool CheckDeclRef(InterpState &S, CodePtr OpPC, const DeclRefExpr *DR) {
  const ValueDecl *D = DR->getDecl();
  // Delegate to the shared diagnostic helper for unknown declarations.
  return diagnoseUnknownDecl(S, OpPC, D);
}
1192
1193bool InvalidDeclRef(InterpState &S, CodePtr OpPC, const DeclRefExpr *DR,
1194 bool InitializerFailed) {
1195 assert(DR);
1196
1197 if (InitializerFailed) {
1198 const SourceInfo &Loc = S.Current->getSource(PC: OpPC);
1199 const auto *VD = cast<VarDecl>(Val: DR->getDecl());
1200 S.FFDiag(SI: Loc, DiagId: diag::note_constexpr_var_init_non_constant, ExtraNotes: 1) << VD;
1201 S.Note(Loc: VD->getLocation(), DiagId: diag::note_declared_at);
1202 return false;
1203 }
1204
1205 return CheckDeclRef(S, OpPC, DR);
1206}
1207
1208bool CheckDummy(InterpState &S, CodePtr OpPC, const Block *B, AccessKinds AK) {
1209 if (!B->isDummy())
1210 return true;
1211
1212 const ValueDecl *D = B->getDescriptor()->asValueDecl();
1213 if (!D)
1214 return false;
1215
1216 if (AK == AK_Read || AK == AK_Increment || AK == AK_Decrement)
1217 return diagnoseUnknownDecl(S, OpPC, D);
1218
1219 if (AK == AK_Destroy || S.getLangOpts().CPlusPlus14) {
1220 const SourceInfo &E = S.Current->getSource(PC: OpPC);
1221 S.FFDiag(SI: E, DiagId: diag::note_constexpr_modify_global);
1222 }
1223 return false;
1224}
1225
/// Check pointer arguments of a call against the callee's nonnull
/// attributes. \p ArgSize is the total byte size of the arguments
/// currently on the stack.
static bool CheckNonNullArgs(InterpState &S, CodePtr OpPC, const Function *F,
                             const CallExpr *CE, unsigned ArgSize) {
  auto Args = ArrayRef(CE->getArgs(), CE->getNumArgs());
  auto NonNullArgs = collectNonNullArgs(F: F->getDecl(), Args);
  // Offset tracks how far below the top of the argument area the
  // current argument lives; Index is its position in NonNullArgs.
  unsigned Offset = 0;
  unsigned Index = 0;
  for (const Expr *Arg : Args) {
    if (NonNullArgs[Index] && Arg->getType()->isPointerType()) {
      const Pointer &ArgPtr = S.Stk.peek<Pointer>(Offset: ArgSize - Offset);
      if (ArgPtr.isZero()) {
        // Null passed to a parameter declared nonnull.
        const SourceLocation &Loc = S.Current->getLocation(PC: OpPC);
        S.CCEDiag(Loc, DiagId: diag::note_non_null_attribute_failed);
        return false;
      }
    }

    // Advance past this argument's (aligned) stack slot.
    Offset += align(Size: primSize(Type: S.Ctx.classify(E: Arg).value_or(PT: PT_Ptr)));
    ++Index;
  }
  return true;
}
1247
/// Invoke the (non-trivial) destructor of the record at \p BasePtr by
/// pushing the pointer and performing a regular interpreter Call.
static bool runRecordDestructor(InterpState &S, CodePtr OpPC,
                                const Pointer &BasePtr,
                                const Descriptor *Desc) {
  assert(Desc->isRecord());
  const Record *R = Desc->ElemRecord;
  assert(R);

  // Running a destructor on the same object the currently-executing
  // destructor is operating on would destroy it twice.
  if (!S.Current->isBottomFrame() && S.Current->hasThisPointer() &&
      S.Current->getFunction()->isDestructor() &&
      Pointer::pointToSameBlock(A: BasePtr, B: S.Current->getThis())) {
    const SourceInfo &Loc = S.Current->getSource(PC: OpPC);
    S.FFDiag(SI: Loc, DiagId: diag::note_constexpr_double_destroy);
    return false;
  }

  // Destructor of this record.
  const CXXDestructorDecl *Dtor = R->getDestructor();
  assert(Dtor);
  assert(!Dtor->isTrivial());
  const Function *DtorFunc = S.getContext().getOrCreateFunction(FuncDecl: Dtor);
  if (!DtorFunc)
    return false;

  S.Stk.push<Pointer>(Args: BasePtr);
  return Call(S, OpPC, Func: DtorFunc, VarArgSize: 0);
}
1274
1275static bool RunDestructors(InterpState &S, CodePtr OpPC, const Block *B) {
1276 assert(B);
1277 const Descriptor *Desc = B->getDescriptor();
1278
1279 if (Desc->isPrimitive() || Desc->isPrimitiveArray())
1280 return true;
1281
1282 assert(Desc->isRecord() || Desc->isCompositeArray());
1283
1284 if (Desc->hasTrivialDtor())
1285 return true;
1286
1287 if (Desc->isCompositeArray()) {
1288 unsigned N = Desc->getNumElems();
1289 if (N == 0)
1290 return true;
1291 const Descriptor *ElemDesc = Desc->ElemDesc;
1292 assert(ElemDesc->isRecord());
1293
1294 Pointer RP(const_cast<Block *>(B));
1295 for (int I = static_cast<int>(N) - 1; I >= 0; --I) {
1296 if (!runRecordDestructor(S, OpPC, BasePtr: RP.atIndex(Idx: I).narrow(), Desc: ElemDesc))
1297 return false;
1298 }
1299 return true;
1300 }
1301
1302 assert(Desc->isRecord());
1303 return runRecordDestructor(S, OpPC, BasePtr: Pointer(const_cast<Block *>(B)), Desc);
1304}
1305
1306static bool hasVirtualDestructor(QualType T) {
1307 if (const CXXRecordDecl *RD = T->getAsCXXRecordDecl())
1308 if (const CXXDestructorDecl *DD = RD->getDestructor())
1309 return DD->isVirtual();
1310 return false;
1311}
1312
/// Opcode for delete / delete[] (and __builtin_operator_delete):
/// validates the delete, runs the destructors, then deallocates the
/// block from the dynamic allocator.
bool Free(InterpState &S, CodePtr OpPC, bool DeleteIsArrayForm,
          bool IsGlobalDelete) {
  if (!CheckDynamicMemoryAllocation(S, OpPC))
    return false;

  DynamicAllocator &Allocator = S.getAllocator();

  const Expr *Source = nullptr;
  const Block *BlockToDelete = nullptr;
  {
    // Extra scope for this so the block doesn't have this pointer
    // pointing to it when we destroy it.
    Pointer Ptr = S.Stk.pop<Pointer>();

    // Deleting nullptr is always fine.
    if (Ptr.isZero())
      return true;

    // Remove base casts.
    QualType InitialType = Ptr.getType();
    Ptr = Ptr.stripBaseCasts();

    Source = Ptr.getDeclDesc()->asExpr();
    BlockToDelete = Ptr.block();

    // Check that new[]/delete[] or new/delete were used, not a mixture.
    const Descriptor *BlockDesc = BlockToDelete->getDescriptor();
    if (std::optional<DynamicAllocator::Form> AllocForm =
            Allocator.getAllocationForm(Source)) {
      DynamicAllocator::Form DeleteForm =
          DeleteIsArrayForm ? DynamicAllocator::Form::Array
                            : DynamicAllocator::Form::NonArray;
      if (!CheckNewDeleteForms(S, OpPC, AllocForm: *AllocForm, DeleteForm, D: BlockDesc,
                               NewExpr: Source))
        return false;
    }

    // For the non-array case, the types must match if the static type
    // does not have a virtual destructor.
    if (!DeleteIsArrayForm && Ptr.getType() != InitialType &&
        !hasVirtualDestructor(T: InitialType)) {
      S.FFDiag(SI: S.Current->getSource(PC: OpPC),
               DiagId: diag::note_constexpr_delete_base_nonvirt_dtor)
          << InitialType << Ptr.getType();
      return false;
    }

    // Only a pointer to the start of the complete allocated object may be
    // deleted (not a subobject, array element, or one-past-the-end pointer).
    if (!Ptr.isRoot() || (Ptr.isOnePastEnd() && !Ptr.isZeroSizeArray()) ||
        (Ptr.isArrayElement() && Ptr.getIndex() != 0)) {
      const SourceInfo &Loc = S.Current->getSource(PC: OpPC);
      S.FFDiag(SI: Loc, DiagId: diag::note_constexpr_delete_subobject)
          << Ptr.toDiagnosticString(Ctx: S.getASTContext()) << Ptr.isOnePastEnd();
      return false;
    }

    // The pointed-to object must actually originate from a recognized
    // heap allocation.
    if (!CheckDeleteSource(S, OpPC, Source, Ptr))
      return false;

    // For a class type with a virtual destructor, the selected operator delete
    // is the one looked up when building the destructor.
    if (!DeleteIsArrayForm && !IsGlobalDelete) {
      QualType AllocType = Ptr.getType();
      auto getVirtualOperatorDelete = [](QualType T) -> const FunctionDecl * {
        if (const CXXRecordDecl *RD = T->getAsCXXRecordDecl())
          if (const CXXDestructorDecl *DD = RD->getDestructor())
            return DD->isVirtual() ? DD->getOperatorDelete() : nullptr;
        return nullptr;
      };

      // That operator delete must be usable in constant evaluation
      // (i.e. a replaceable global allocation function).
      if (const FunctionDecl *VirtualDelete =
              getVirtualOperatorDelete(AllocType);
          VirtualDelete &&
          !VirtualDelete
               ->isUsableAsGlobalAllocationFunctionInConstantEvaluation()) {
        S.FFDiag(SI: S.Current->getSource(PC: OpPC),
                 DiagId: diag::note_constexpr_new_non_replaceable)
            << isa<CXXMethodDecl>(Val: VirtualDelete) << VirtualDelete;
        return false;
      }
    }
  }
  assert(Source);
  assert(BlockToDelete);

  // Invoke destructors before deallocating the memory.
  if (!RunDestructors(S, OpPC, B: BlockToDelete))
    return false;

  if (!Allocator.deallocate(Source, BlockToDelete, S)) {
    // Nothing has been deallocated, this must be a double-delete.
    const SourceInfo &Loc = S.Current->getSource(PC: OpPC);
    S.FFDiag(SI: Loc, DiagId: diag::note_constexpr_double_delete);
    return false;
  }

  return true;
}
1410
/// Warn (CCEDiag) about a value outside the representable range of the
/// unscoped enum \p ED being produced; evaluation continues either way.
void diagnoseEnumValue(InterpState &S, CodePtr OpPC, const EnumDecl *ED,
                       const APSInt &Value) {
  llvm::APInt Min;
  llvm::APInt Max;
  // Note the argument order: getValueRange() fills (Max, Min).
  ED->getValueRange(Max, Min);
  // Adjust the upper bound so Max is inclusive — presumably the range
  // returned above is exclusive at the top (TODO confirm against
  // EnumDecl::getValueRange).
  --Max;

  if (ED->getNumNegativeBits() &&
      (Max.slt(RHS: Value.getSExtValue()) || Min.sgt(RHS: Value.getSExtValue()))) {
    // Enum with negative enumerators: compare with signed semantics.
    const SourceLocation &Loc = S.Current->getLocation(PC: OpPC);
    S.CCEDiag(Loc, DiagId: diag::note_constexpr_unscoped_enum_out_of_range)
        << llvm::toString(I: Value, Radix: 10) << Min.getSExtValue() << Max.getSExtValue()
        << ED;
  } else if (!ED->getNumNegativeBits() && Max.ult(RHS: Value.getZExtValue())) {
    // Non-negative enum: compare with unsigned semantics.
    const SourceLocation &Loc = S.Current->getLocation(PC: OpPC);
    S.CCEDiag(Loc, DiagId: diag::note_constexpr_unscoped_enum_out_of_range)
        << llvm::toString(I: Value, Radix: 10) << Min.getZExtValue() << Max.getZExtValue()
        << ED;
  }
}
1431
/// Check a use of a non-literal type \p T. Allowed when it occurs inside
/// a constexpr constructor of the declaration currently being evaluated
/// (the C++1y rule below); otherwise diagnosed and rejected.
bool CheckLiteralType(InterpState &S, CodePtr OpPC, const Type *T) {
  assert(T);
  assert(!S.getLangOpts().CPlusPlus23);

  // C++1y: A constant initializer for an object o [...] may also invoke
  // constexpr constructors for o and its subobjects even if those objects
  // are of non-literal class types.
  //
  // C++11 missed this detail for aggregates, so classes like this:
  //   struct foo_t { union { int i; volatile int j; } u; };
  // are not (obviously) initializable like so:
  //   __attribute__((__require_constant_initialization__))
  //   static const foo_t x = {{0}};
  // because "i" is a subobject with non-literal initialization (due to the
  // volatile member of the union). See:
  //   http://www.open-std.org/jtc1/sc22/wg21/docs/cwg_active.html#1677
  // Therefore, we use the C++1y behavior.

  // Inside a constructor whose 'this' object is the declaration being
  // evaluated, the non-literal type is permitted.
  if (!S.Current->isBottomFrame() &&
      S.Current->getFunction()->isConstructor() &&
      S.Current->getThis().getDeclDesc()->asDecl() == S.EvaluatingDecl) {
    return true;
  }

  const Expr *E = S.Current->getExpr(PC: OpPC);
  if (S.getLangOpts().CPlusPlus11)
    S.FFDiag(E, DiagId: diag::note_constexpr_nonliteral) << E->getType();
  else
    S.FFDiag(E, DiagId: diag::note_invalid_subexpr_in_const_expr);
  return false;
}
1463
/// Shared implementation of GetPtrField/GetPtrFieldPop: push a pointer
/// to the field at byte offset \p Off of \p Ptr.
static bool getField(InterpState &S, CodePtr OpPC, const Pointer &Ptr,
                     uint32_t Off) {
  // Null check only applies in C++ constant contexts.
  if (S.getLangOpts().CPlusPlus && S.inConstantContext() &&
      !CheckNull(S, OpPC, Ptr, CSK: CSK_Field))
    return false;

  if (!CheckRange(S, OpPC, Ptr, CSK: CSK_Field))
    return false;
  if (!CheckArray(S, OpPC, Ptr))
    return false;
  if (!CheckSubobject(S, OpPC, Ptr, CSK: CSK_Field))
    return false;

  // Integral pointers: apply the offset arithmetically.
  if (Ptr.isIntegralPointer()) {
    if (std::optional<IntPointer> IntPtr =
            Ptr.asIntPointer().atOffset(ASTCtx: S.getASTContext(), Offset: Off)) {
      S.Stk.push<Pointer>(Args: std::move(*IntPtr));
      return true;
    }
    return false;
  }

  if (!Ptr.isBlockPointer()) {
    // FIXME: The only time we (seem to) get here is when trying to access a
    // field of a typeid pointer. In that case, we're supposed to diagnose e.g.
    // `typeid(int).name`, but we currently diagnose `&typeid(int)`.
    S.FFDiag(SI: S.Current->getSource(PC: OpPC),
             DiagId: diag::note_constexpr_access_unreadable_object)
        << AK_Read << Ptr.toDiagnosticString(Ctx: S.getASTContext());
    return false;
  }

  // We can't get the field of something that's not a record.
  if (!Ptr.getFieldDesc()->isRecord())
    return false;

  // The field must lie within the block's storage.
  if ((Ptr.getByteOffset() + Off) >= Ptr.block()->getSize())
    return false;

  S.Stk.push<Pointer>(Args: Ptr.atField(Off));
  return true;
}
1506
1507bool GetPtrField(InterpState &S, CodePtr OpPC, uint32_t Off) {
1508 const auto &Ptr = S.Stk.peek<Pointer>();
1509 return getField(S, OpPC, Ptr, Off);
1510}
1511
1512bool GetPtrFieldPop(InterpState &S, CodePtr OpPC, uint32_t Off) {
1513 const auto &Ptr = S.Stk.pop<Pointer>();
1514 return getField(S, OpPC, Ptr, Off);
1515}
1516
1517static bool checkConstructor(InterpState &S, CodePtr OpPC, const Function *Func,
1518 const Pointer &ThisPtr) {
1519 assert(Func->isConstructor());
1520
1521 if (Func->getParentDecl()->isInvalidDecl())
1522 return false;
1523
1524 const Descriptor *D = ThisPtr.getFieldDesc();
1525 // FIXME: I think this case is not 100% correct. E.g. a pointer into a
1526 // subobject of a composite array.
1527 if (!D->ElemRecord)
1528 return true;
1529
1530 if (D->ElemRecord->getNumVirtualBases() == 0)
1531 return true;
1532
1533 S.FFDiag(Loc: S.Current->getLocation(PC: OpPC), DiagId: diag::note_constexpr_virtual_base)
1534 << Func->getParentDecl();
1535 return false;
1536}
1537
1538bool CheckDestructor(InterpState &S, CodePtr OpPC, const Pointer &Ptr) {
1539 if (!CheckLive(S, OpPC, Ptr, AK: AK_Destroy))
1540 return false;
1541 if (!CheckTemporary(S, OpPC, B: Ptr.block(), AK: AK_Destroy))
1542 return false;
1543 if (!CheckRange(S, OpPC, Ptr, AK: AK_Destroy))
1544 return false;
1545 if (!CheckLifetime(S, OpPC, LT: Ptr.getLifetime(), AK: AK_Destroy))
1546 return false;
1547
1548 // Can't call a dtor on a global variable.
1549 if (Ptr.block()->isStatic()) {
1550 const SourceInfo &E = S.Current->getSource(PC: OpPC);
1551 S.FFDiag(SI: E, DiagId: diag::note_constexpr_modify_global);
1552 return false;
1553 }
1554 return CheckActive(S, OpPC, Ptr, AK: AK_Destroy);
1555}
1556
1557/// Opcode. Check if the function decl can be called at compile time.
1558bool CheckFunctionDecl(InterpState &S, CodePtr OpPC, const FunctionDecl *FD) {
1559 if (S.checkingPotentialConstantExpression() && S.Current->getDepth() != 0)
1560 return false;
1561
1562 const FunctionDecl *Definition = nullptr;
1563 const Stmt *Body = FD->getBody(Definition);
1564
1565 if (Definition && Body &&
1566 (Definition->isConstexpr() || (S.Current->MSVCConstexprAllowed &&
1567 Definition->hasAttr<MSConstexprAttr>())))
1568 return true;
1569
1570 return diagnoseCallableDecl(S, OpPC, DiagDecl: FD);
1571}
1572
/// Check a pointer cast (e.g. reinterpret-style or void*-to-T*) of the
/// pointer on top of the stack to \p TargetType. Most suspicious casts
/// only get a CCEDiag and are allowed; casting between unrelated record
/// types fails.
bool CheckBitCast(InterpState &S, CodePtr OpPC, const Type *TargetType,
                  bool SrcIsVoidPtr) {
  const auto &Ptr = S.Stk.peek<Pointer>();
  // Null and non-block pointers are always acceptable.
  if (Ptr.isZero())
    return true;
  if (!Ptr.isBlockPointer())
    return true;

  if (TargetType->isIntegerType())
    return true;

  if (SrcIsVoidPtr && S.getLangOpts().CPlusPlus) {
    // NOTE(review): Ptr.isZero() already returned true above, so
    // HasValidResult is always true here and the `else` branch below
    // appears unreachable — confirm whether the early zero-return above
    // should instead be folded into this branch.
    bool HasValidResult = !Ptr.isZero();

    if (HasValidResult) {
      // Results of std::allocator<T>::allocate() may be cast freely.
      if (S.getStdAllocatorCaller(Name: "allocate"))
        return true;

      const auto *E = cast<CastExpr>(Val: S.Current->getExpr(PC: OpPC));
      // C++26: void* may be cast back to a similar pointer type.
      if (S.getLangOpts().CPlusPlus26 &&
          S.getASTContext().hasSimilarType(T1: Ptr.getType(),
                                           T2: QualType(TargetType, 0)))
        return true;

      S.CCEDiag(E, DiagId: diag::note_constexpr_invalid_void_star_cast)
          << E->getSubExpr()->getType() << S.getLangOpts().CPlusPlus26
          << Ptr.getType().getCanonicalType() << E->getType()->getPointeeType();
    } else if (!S.getLangOpts().CPlusPlus26) {
      const SourceInfo &E = S.Current->getSource(PC: OpPC);
      S.CCEDiag(SI: E, DiagId: diag::note_constexpr_invalid_cast)
          << diag::ConstexprInvalidCastKind::CastFrom << "'void *'"
          << S.Current->getRange(PC: OpPC);
    }
  }

  // Casting between unrelated record types is rejected outright.
  QualType PtrType = Ptr.getType();
  if (PtrType->isRecordType() &&
      PtrType->getAsRecordDecl() != TargetType->getAsRecordDecl()) {
    S.CCEDiag(SI: S.Current->getSource(PC: OpPC), DiagId: diag::note_constexpr_invalid_cast)
        << diag::ConstexprInvalidCastKind::ThisConversionOrReinterpret
        << S.getLangOpts().CPlusPlus << S.Current->getRange(PC: OpPC);
    return false;
  }
  return true;
}
1618
1619static void compileFunction(InterpState &S, const Function *Func) {
1620 const FunctionDecl *Definition = Func->getDecl()->getDefinition();
1621 if (!Definition)
1622 return;
1623
1624 Compiler<ByteCodeEmitter>(S.getContext(), S.P)
1625 .compileFunc(FuncDecl: Definition, Func: const_cast<Function *>(Func));
1626}
1627
/// Opcode: call a variadic function. \p VarArgSize is the byte size of
/// the variadic arguments currently on the stack.
bool CallVar(InterpState &S, CodePtr OpPC, const Function *Func,
             uint32_t VarArgSize) {
  if (Func->hasThisPointer()) {
    size_t ArgSize = Func->getArgSize() + VarArgSize;
    // The instance pointer sits below the arguments (and below the RVO
    // pointer, if any).
    size_t ThisOffset = ArgSize - (Func->hasRVO() ? primSize(Type: PT_Ptr) : 0);
    const Pointer &ThisPtr = S.Stk.peek<Pointer>(Offset: ThisOffset);

    // If the current function is a lambda static invoker and
    // the function we're about to call is a lambda call operator,
    // skip the CheckInvoke, since the ThisPtr is a null pointer
    // anyway.
    if (!(S.Current->getFunction() &&
          S.Current->getFunction()->isLambdaStaticInvoker() &&
          Func->isLambdaCallOperator())) {
      if (!CheckInvoke(S, OpPC, Ptr: ThisPtr))
        return false;
    }

    if (S.checkingPotentialConstantExpression())
      return false;
  }

  // Compile the callee's bytecode on demand.
  if (!Func->isFullyCompiled())
    compileFunction(S, Func);

  if (!CheckCallable(S, OpPC, F: Func))
    return false;

  if (!CheckCallDepth(S, OpPC))
    return false;

  auto NewFrame = std::make_unique<InterpFrame>(args&: S, args&: Func, args&: OpPC, args&: VarArgSize);
  InterpFrame *FrameBefore = S.Current;
  S.Current = NewFrame.get();

  // Note that we cannot assert(CallResult.hasValue()) here since
  // Ret() above only sets the APValue if the curent frame doesn't
  // have a caller set.
  if (Interpret(S)) {
    NewFrame.release(); // Frame was delete'd already.
    assert(S.Current == FrameBefore);
    return true;
  }

  // Interpreting the function failed somehow. Reset to
  // previous state.
  S.Current = FrameBefore;
  return false;
}
/// Opcode: perform a (non-virtual) call of \p Func. Validates the
/// instance pointer for member calls, sets up a new frame, and
/// interprets the callee's body.
bool Call(InterpState &S, CodePtr OpPC, const Function *Func,
          uint32_t VarArgSize) {

  // C doesn't have constexpr functions.
  if (!S.getLangOpts().CPlusPlus)
    return Invalid(S, OpPC);

  assert(Func);
  // On failure after arguments were pushed, the stack must be cleaned
  // up before returning.
  auto cleanup = [&]() -> bool {
    cleanupAfterFunctionCall(S, OpPC, Func);
    return false;
  };

  if (Func->hasThisPointer()) {
    size_t ArgSize = Func->getArgSize() + VarArgSize;
    // The instance pointer sits below the arguments (and below the RVO
    // pointer, if any).
    size_t ThisOffset = ArgSize - (Func->hasRVO() ? primSize(Type: PT_Ptr) : 0);

    const Pointer &ThisPtr = S.Stk.peek<Pointer>(Offset: ThisOffset);

    // C++23 [expr.const]p5.6
    //   an invocation of a virtual function ([class.virtual]) for an object whose
    //   dynamic type is constexpr-unknown;
    if (ThisPtr.isDummy() && Func->isVirtual())
      return false;

    // If the current function is a lambda static invoker and
    // the function we're about to call is a lambda call operator,
    // skip the CheckInvoke, since the ThisPtr is a null pointer
    // anyway.
    if (S.Current->getFunction() &&
        S.Current->getFunction()->isLambdaStaticInvoker() &&
        Func->isLambdaCallOperator()) {
      assert(ThisPtr.isZero());
    } else {
      if (!CheckInvoke(S, OpPC, Ptr: ThisPtr))
        return cleanup();
      // Constructors/destructors may run on inactive union members.
      if (!Func->isConstructor() && !Func->isDestructor() &&
          !CheckActive(S, OpPC, Ptr: ThisPtr, AK: AK_MemberCall))
        return false;
    }

    if (Func->isConstructor() && !checkConstructor(S, OpPC, Func, ThisPtr))
      return false;
    if (Func->isDestructor() && !CheckDestructor(S, OpPC, Ptr: ThisPtr))
      return false;

    // Record the block so virtual dispatch knows this object is still
    // under construction/destruction.
    if (Func->isConstructor() || Func->isDestructor())
      S.InitializingBlocks.push_back(Elt: ThisPtr.block());
  }

  // Compile the callee's bytecode on demand.
  if (!Func->isFullyCompiled())
    compileFunction(S, Func);

  if (!CheckCallable(S, OpPC, F: Func))
    return cleanup();

  // Do not evaluate any function calls in checkingPotentialConstantExpression
  // mode. Constructors will be aborted later when their initializers are
  // evaluated.
  if (S.checkingPotentialConstantExpression() && !Func->isConstructor())
    return false;

  if (!CheckCallDepth(S, OpPC))
    return cleanup();

  auto NewFrame = std::make_unique<InterpFrame>(args&: S, args&: Func, args&: OpPC, args&: VarArgSize);
  InterpFrame *FrameBefore = S.Current;
  S.Current = NewFrame.get();

  // Immediate (consteval) callees are evaluated in a constant context.
  InterpStateCCOverride CCOverride(S, Func->isImmediate());
  // Note that we cannot assert(CallResult.hasValue()) here since
  // Ret() above only sets the APValue if the curent frame doesn't
  // have a caller set.
  bool Success = Interpret(S);
  // Remove initializing block again.
  if (Func->isConstructor() || Func->isDestructor())
    S.InitializingBlocks.pop_back();

  if (!Success) {
    // Interpreting the function failed somehow. Reset to
    // previous state.
    S.Current = FrameBefore;
    return false;
  }

  NewFrame.release(); // Frame was delete'd already.
  assert(S.Current == FrameBefore);
  return true;
}
1766
/// Determine the dynamic (most-derived) class of the object \p TypePtr
/// points to, for virtual dispatch. Fails when the dynamic type is
/// constexpr-unknown (a non-constexpr static/const variable).
static bool GetDynamicDecl(InterpState &S, CodePtr OpPC, Pointer TypePtr,
                           const CXXRecordDecl *&DynamicDecl) {
  TypePtr = TypePtr.stripBaseCasts();

  QualType DynamicType = TypePtr.getType();
  if (TypePtr.isStatic() || TypePtr.isConst()) {
    // A static/const variable that isn't constexpr has an unknown
    // dynamic type during constant evaluation.
    if (const VarDecl *VD = TypePtr.getDeclDesc()->asVarDecl();
        VD && !VD->isConstexpr()) {
      const Expr *E = S.Current->getExpr(PC: OpPC);
      APValue V = TypePtr.toAPValue(ASTCtx: S.getASTContext());
      QualType TT = S.getASTContext().getLValueReferenceType(T: DynamicType);
      S.FFDiag(E, DiagId: diag::note_constexpr_polymorphic_unknown_dynamic_type)
          << AccessKinds::AK_MemberCall << V.getAsString(Ctx: S.getASTContext(), Ty: TT);
      return false;
    }
  }

  // Unwrap pointer/reference and array types down to the record decl.
  if (DynamicType->isPointerType() || DynamicType->isReferenceType()) {
    DynamicDecl = DynamicType->getPointeeCXXRecordDecl();
  } else if (DynamicType->isArrayType()) {
    const Type *ElemType = DynamicType->getPointeeOrArrayElementType();
    assert(ElemType);
    DynamicDecl = ElemType->getAsCXXRecordDecl();
  } else {
    DynamicDecl = DynamicType->getAsCXXRecordDecl();
  }
  return true;
}
1795
/// Opcode: perform a virtual call. Resolves the overrider from the
/// dynamic type of the instance pointer, calls it via Call(), and
/// applies the covariant-return upcast if needed.
bool CallVirt(InterpState &S, CodePtr OpPC, const Function *Func,
              uint32_t VarArgSize) {
  assert(Func->hasThisPointer());
  assert(Func->isVirtual());
  size_t ArgSize = Func->getArgSize() + VarArgSize;
  // The instance pointer sits below the arguments (and below the RVO
  // pointer, if any).
  size_t ThisOffset = ArgSize - (Func->hasRVO() ? primSize(Type: PT_Ptr) : 0);
  Pointer &ThisPtr = S.Stk.peek<Pointer>(Offset: ThisOffset);
  const FunctionDecl *Callee = Func->getDecl();

  const CXXRecordDecl *DynamicDecl = nullptr;
  if (!GetDynamicDecl(S, OpPC, TypePtr: ThisPtr, DynamicDecl))
    return false;
  assert(DynamicDecl);

  const auto *StaticDecl = cast<CXXRecordDecl>(Val: Func->getParentDecl());
  const auto *InitialFunction = cast<CXXMethodDecl>(Val: Callee);
  const CXXMethodDecl *Overrider;

  // While the object is still being constructed/destroyed (its block is
  // in InitializingBlocks), dispatch stays at the static type.
  if (StaticDecl != DynamicDecl &&
      !llvm::is_contained(Range&: S.InitializingBlocks, Element: ThisPtr.block())) {
    if (!DynamicDecl->isDerivedFrom(Base: StaticDecl))
      return false;
    Overrider = S.getContext().getOverridingFunction(DynamicDecl, StaticDecl,
                                                     InitialFunction);

  } else {
    Overrider = InitialFunction;
  }

  // C++2a [class.abstract]p6:
  //   the effect of making a virtual call to a pure virtual function [...] is
  //   undefined
  if (Overrider->isPureVirtual()) {
    S.FFDiag(SI: S.Current->getSource(PC: OpPC), DiagId: diag::note_constexpr_pure_virtual_call,
             ExtraNotes: 1)
        << Callee;
    S.Note(Loc: Callee->getLocation(), DiagId: diag::note_declared_at);
    return false;
  }

  if (Overrider != InitialFunction) {
    // DR1872: An instantiated virtual constexpr function can't be called in a
    // constant expression (prior to C++20). We can still constant-fold such a
    // call.
    if (!S.getLangOpts().CPlusPlus20 && Overrider->isVirtual()) {
      const Expr *E = S.Current->getExpr(PC: OpPC);
      S.CCEDiag(E, DiagId: diag::note_constexpr_virtual_call) << E->getSourceRange();
    }

    Func = S.getContext().getOrCreateFunction(FuncDecl: Overrider);

    const CXXRecordDecl *ThisFieldDecl =
        ThisPtr.getFieldDesc()->getType()->getAsCXXRecordDecl();
    if (Func->getParentDecl()->isDerivedFrom(Base: ThisFieldDecl)) {
      // If the function we call is further DOWN the hierarchy than the
      // FieldDesc of our pointer, just go up the hierarchy of this field
      // the furthest we can go.
      ThisPtr = ThisPtr.stripBaseCasts();
    }
  }

  if (!Call(S, OpPC, Func, VarArgSize))
    return false;

  // Covariant return types. The return type of Overrider is a pointer
  // or reference to a class type.
  if (Overrider != InitialFunction &&
      Overrider->getReturnType()->isPointerOrReferenceType() &&
      InitialFunction->getReturnType()->isPointerOrReferenceType()) {
    QualType OverriderPointeeType =
        Overrider->getReturnType()->getPointeeType();
    QualType InitialPointeeType =
        InitialFunction->getReturnType()->getPointeeType();
    // We've called Overrider above, but calling code expects us to return what
    // InitialFunction returned. According to the rules for covariant return
    // types, what InitialFunction returns needs to be a base class of what
    // Overrider returns. So, we need to do an upcast here.
    unsigned Offset = S.getContext().collectBaseOffset(
        BaseDecl: InitialPointeeType->getAsRecordDecl(),
        DerivedDecl: OverriderPointeeType->getAsRecordDecl());
    return GetPtrBasePop(S, OpPC, Off: Offset, /*IsNullOK=*/NullOK: true);
  }

  return true;
}
1881
1882bool CallBI(InterpState &S, CodePtr OpPC, const CallExpr *CE,
1883 uint32_t BuiltinID) {
1884 // A little arbitrary, but the current interpreter allows evaluation
1885 // of builtin functions in this mode, with some exceptions.
1886 if (BuiltinID == Builtin::BI__builtin_operator_new &&
1887 S.checkingPotentialConstantExpression())
1888 return false;
1889
1890 return InterpretBuiltin(S, OpPC, Call: CE, BuiltinID);
1891}
1892
/// Performs a call through a function pointer popped from the stack.
/// Rejects null callees, non-function pointers, and mismatched call
/// types, then dispatches to Call() or CallVirt().
bool CallPtr(InterpState &S, CodePtr OpPC, uint32_t ArgSize,
             const CallExpr *CE) {
  const Pointer &Ptr = S.Stk.pop<Pointer>();

  // Calling a null function pointer is never a constant expression.
  if (Ptr.isZero()) {
    S.FFDiag(SI: S.Current->getSource(PC: OpPC), DiagId: diag::note_constexpr_null_callee)
        << const_cast<Expr *>(CE->getCallee()) << CE->getSourceRange();
    return false;
  }

  if (!Ptr.isFunctionPointer())
    return Invalid(S, OpPC);

  const Function *F = Ptr.asFunctionPointer().Func;
  assert(F);
  // Don't allow calling block pointers.
  if (!F->getDecl())
    return Invalid(S, OpPC);

  // This happens when the call expression has been cast to
  // something else, but we don't support that.
  if (S.Ctx.classify(T: F->getDecl()->getReturnType()) !=
      S.Ctx.classify(T: CE->getCallReturnType(Ctx: S.getASTContext())))
    return false;

  // Check argument nullability state.
  if (F->hasNonNullAttr()) {
    if (!CheckNonNullArgs(S, OpPC, F, CE, ArgSize))
      return false;
  }

  // Can happen when casting function pointers around.
  QualType CalleeType = CE->getCallee()->getType();
  if (CalleeType->isPointerType() &&
      !S.getASTContext().hasSameFunctionTypeIgnoringExceptionSpec(
          T: F->getDecl()->getType(), U: CalleeType->getPointeeType())) {
    return false;
  }

  // We need to compile (and check) early for function pointer calls
  // because the Call/CallVirt below might access the instance pointer
  // but the Function's information about them is wrong.
  if (!F->isFullyCompiled())
    compileFunction(S, Func: F);

  if (!CheckCallable(S, OpPC, F))
    return false;

  assert(ArgSize >= F->getWrittenArgSize());
  uint32_t VarArgSize = ArgSize - F->getWrittenArgSize();

  // We need to do this explicitly here since we don't have the necessary
  // information to do it automatically.
  if (F->isThisPointerExplicit())
    VarArgSize -= align(Size: primSize(Type: PT_Ptr));

  if (F->isVirtual())
    return CallVirt(S, OpPC, Func: F, VarArgSize);

  return Call(S, OpPC, Func: F, VarArgSize);
}
1954
1955static void startLifetimeRecurse(const Pointer &Ptr) {
1956 if (const Record *R = Ptr.getRecord()) {
1957 Ptr.startLifetime();
1958 for (const Record::Field &Fi : R->fields())
1959 startLifetimeRecurse(Ptr: Ptr.atField(Off: Fi.Offset));
1960 return;
1961 }
1962
1963 if (const Descriptor *FieldDesc = Ptr.getFieldDesc();
1964 FieldDesc->isCompositeArray()) {
1965 assert(Ptr.getLifetime() == Lifetime::Started);
1966 for (unsigned I = 0; I != FieldDesc->getNumElems(); ++I)
1967 startLifetimeRecurse(Ptr: Ptr.atIndex(Idx: I).narrow());
1968 return;
1969 }
1970
1971 Ptr.startLifetime();
1972}
1973
1974bool StartLifetime(InterpState &S, CodePtr OpPC) {
1975 const auto &Ptr = S.Stk.peek<Pointer>();
1976 if (Ptr.isBlockPointer() && !CheckDummy(S, OpPC, B: Ptr.block(), AK: AK_Destroy))
1977 return false;
1978 startLifetimeRecurse(Ptr: Ptr.narrow());
1979 return true;
1980}
1981
// FIXME: It might be better to do the recursing as part of the generated
// code for a destructor?
/// Ends the lifetime of \p Ptr and everything reachable through it:
/// record fields and composite array elements, recursively.
static void endLifetimeRecurse(const Pointer &Ptr) {
  if (const Record *R = Ptr.getRecord()) {
    // End the record's lifetime first, then that of all its fields.
    Ptr.endLifetime();
    for (const Record::Field &Fi : R->fields())
      endLifetimeRecurse(Ptr: Ptr.atField(Off: Fi.Offset));
    return;
  }

  if (const Descriptor *FieldDesc = Ptr.getFieldDesc();
      FieldDesc->isCompositeArray()) {
    // No endLifetime() for array roots.
    assert(Ptr.getLifetime() == Lifetime::Started);
    for (unsigned I = 0; I != FieldDesc->getNumElems(); ++I)
      endLifetimeRecurse(Ptr: Ptr.atIndex(Idx: I).narrow());
    return;
  }

  // Primitive field or primitive array.
  Ptr.endLifetime();
}
2003
2004/// Ends the lifetime of the peek'd pointer.
2005bool EndLifetime(InterpState &S, CodePtr OpPC) {
2006 const auto &Ptr = S.Stk.peek<Pointer>();
2007 if (Ptr.isBlockPointer() && !CheckDummy(S, OpPC, B: Ptr.block(), AK: AK_Destroy))
2008 return false;
2009
2010 endLifetimeRecurse(Ptr: Ptr.narrow());
2011 return true;
2012}
2013
2014/// Ends the lifetime of the pop'd pointer.
2015bool EndLifetimePop(InterpState &S, CodePtr OpPC) {
2016 const auto &Ptr = S.Stk.pop<Pointer>();
2017 if (Ptr.isBlockPointer() && !CheckDummy(S, OpPC, B: Ptr.block(), AK: AK_Destroy))
2018 return false;
2019
2020 endLifetimeRecurse(Ptr: Ptr.narrow());
2021 return true;
2022}
2023
/// Validates a placement-new style construction into the pointer on top of
/// the stack: the storage must be live, writable, in range, and its type
/// must be similar to (and at least as large as) the allocated type.
/// \p ArraySize, if present, is the runtime element count of an array new.
bool CheckNewTypeMismatch(InterpState &S, CodePtr OpPC, const Expr *E,
                          std::optional<uint64_t> ArraySize) {
  const Pointer &Ptr = S.Stk.peek<Pointer>();

  // Constructing into a direct union member activates it.
  if (Ptr.inUnion() && Ptr.getBase().getRecord()->isUnion())
    Ptr.activate();

  if (Ptr.isZero()) {
    S.FFDiag(SI: S.Current->getSource(PC: OpPC), DiagId: diag::note_constexpr_access_null)
        << AK_Construct;
    return false;
  }

  if (!Ptr.isBlockPointer())
    return false;

  if (!CheckRange(S, OpPC, Ptr, AK: AK_Construct))
    return false;

  // Construction (re)starts the lifetime of the storage and its subobjects.
  startLifetimeRecurse(Ptr);

  // Similar to CheckStore(), but with the additional CheckTemporary() call and
  // the AccessKinds are different.
  if (!Ptr.block()->isAccessible()) {
    if (!CheckExtern(S, OpPC, Ptr))
      return false;
    if (!CheckLive(S, OpPC, Ptr, AK: AK_Construct))
      return false;
    return CheckDummy(S, OpPC, B: Ptr.block(), AK: AK_Construct);
  }
  if (!CheckTemporary(S, OpPC, B: Ptr.block(), AK: AK_Construct))
    return false;

  // CheckLifetime for this and all base pointers.
  for (Pointer P = Ptr;;) {
    if (!CheckLifetime(S, OpPC, LT: P.getLifetime(), AK: AK_Construct))
      return false;

    if (P.isRoot())
      break;
    P = P.getBase();
  }

  if (!CheckRange(S, OpPC, Ptr, AK: AK_Construct))
    return false;
  if (!CheckGlobal(S, OpPC, Ptr))
    return false;
  if (!CheckConst(S, OpPC, Ptr))
    return false;
  if (!S.inConstantContext() && isConstexprUnknown(P: Ptr))
    return false;

  // Diagnose operator new/delete functions unusable in constant evaluation.
  if (!InvalidNewDeleteExpr(S, OpPC, E))
    return false;

  // Compute the allocated type; for array new, wrap the element type in a
  // constant array of the runtime size.
  const auto *NewExpr = cast<CXXNewExpr>(Val: E);
  QualType StorageType = Ptr.getFieldDesc()->getDataType(Ctx: S.getASTContext());
  const ASTContext &ASTCtx = S.getASTContext();
  QualType AllocType;
  if (ArraySize) {
    AllocType = ASTCtx.getConstantArrayType(
        EltTy: NewExpr->getAllocatedType(),
        ArySize: APInt(64, static_cast<uint64_t>(*ArraySize), false), SizeExpr: nullptr,
        ASM: ArraySizeModifier::Normal, IndexTypeQuals: 0);
  } else {
    AllocType = NewExpr->getAllocatedType();
  }

  // The allocation must fit into the storage and the (base) element types
  // must be similar.
  unsigned StorageSize = 1;
  unsigned AllocSize = 1;
  if (const auto *CAT = dyn_cast<ConstantArrayType>(Val&: AllocType))
    AllocSize = CAT->getZExtSize();
  if (const auto *CAT = dyn_cast<ConstantArrayType>(Val&: StorageType))
    StorageSize = CAT->getZExtSize();

  if (AllocSize > StorageSize ||
      !ASTCtx.hasSimilarType(T1: ASTCtx.getBaseElementType(QT: AllocType),
                             T2: ASTCtx.getBaseElementType(QT: StorageType))) {
    S.FFDiag(Loc: S.Current->getLocation(PC: OpPC),
             DiagId: diag::note_constexpr_placement_new_wrong_type)
        << StorageType << AllocType;
    return false;
  }

  // Can't activate fields in a union, unless the direct base is the union.
  if (Ptr.inUnion() && !Ptr.isActive() && !Ptr.getBase().getRecord()->isUnion())
    return CheckActive(S, OpPC, Ptr, AK: AK_Construct);

  return true;
}
2114
2115bool InvalidNewDeleteExpr(InterpState &S, CodePtr OpPC, const Expr *E) {
2116 assert(E);
2117
2118 if (const auto *NewExpr = dyn_cast<CXXNewExpr>(Val: E)) {
2119 const FunctionDecl *OperatorNew = NewExpr->getOperatorNew();
2120
2121 if (NewExpr->getNumPlacementArgs() > 0) {
2122 // This is allowed pre-C++26, but only an std function or if
2123 // [[msvc::constexpr]] was used.
2124 if (S.getLangOpts().CPlusPlus26 || S.Current->isStdFunction() ||
2125 S.Current->MSVCConstexprAllowed)
2126 return true;
2127
2128 S.FFDiag(SI: S.Current->getSource(PC: OpPC), DiagId: diag::note_constexpr_new_placement)
2129 << /*C++26 feature*/ 1 << E->getSourceRange();
2130 } else if (
2131 !OperatorNew
2132 ->isUsableAsGlobalAllocationFunctionInConstantEvaluation()) {
2133 S.FFDiag(SI: S.Current->getSource(PC: OpPC),
2134 DiagId: diag::note_constexpr_new_non_replaceable)
2135 << isa<CXXMethodDecl>(Val: OperatorNew) << OperatorNew;
2136 return false;
2137 } else if (!S.getLangOpts().CPlusPlus26 &&
2138 NewExpr->getNumPlacementArgs() == 1 &&
2139 !OperatorNew->isReservedGlobalPlacementOperator()) {
2140 if (!S.getLangOpts().CPlusPlus26) {
2141 S.FFDiag(SI: S.Current->getSource(PC: OpPC), DiagId: diag::note_constexpr_new_placement)
2142 << /*Unsupported*/ 0 << E->getSourceRange();
2143 return false;
2144 }
2145 return true;
2146 }
2147 } else {
2148 const auto *DeleteExpr = cast<CXXDeleteExpr>(Val: E);
2149 const FunctionDecl *OperatorDelete = DeleteExpr->getOperatorDelete();
2150 if (!OperatorDelete
2151 ->isUsableAsGlobalAllocationFunctionInConstantEvaluation()) {
2152 S.FFDiag(SI: S.Current->getSource(PC: OpPC),
2153 DiagId: diag::note_constexpr_new_non_replaceable)
2154 << isa<CXXMethodDecl>(Val: OperatorDelete) << OperatorDelete;
2155 return false;
2156 }
2157 }
2158
2159 return false;
2160}
2161
2162bool handleFixedPointOverflow(InterpState &S, CodePtr OpPC,
2163 const FixedPoint &FP) {
2164 const Expr *E = S.Current->getExpr(PC: OpPC);
2165 if (S.checkingForUndefinedBehavior()) {
2166 S.getASTContext().getDiagnostics().Report(
2167 Loc: E->getExprLoc(), DiagID: diag::warn_fixedpoint_constant_overflow)
2168 << FP.toDiagnosticString(Ctx: S.getASTContext()) << E->getType();
2169 }
2170 S.CCEDiag(E, DiagId: diag::note_constexpr_overflow)
2171 << FP.toDiagnosticString(Ctx: S.getASTContext()) << E->getType();
2172 return S.noteUndefinedBehavior();
2173}
2174
2175bool InvalidShuffleVectorIndex(InterpState &S, CodePtr OpPC, uint32_t Index) {
2176 const SourceInfo &Loc = S.Current->getSource(PC: OpPC);
2177 S.FFDiag(SI: Loc,
2178 DiagId: diag::err_shufflevector_minus_one_is_undefined_behavior_constexpr)
2179 << Index;
2180 return false;
2181}
2182
2183bool CheckPointerToIntegralCast(InterpState &S, CodePtr OpPC,
2184 const Pointer &Ptr, unsigned BitWidth) {
2185 const SourceInfo &E = S.Current->getSource(PC: OpPC);
2186 S.CCEDiag(SI: E, DiagId: diag::note_constexpr_invalid_cast)
2187 << 2 << S.getLangOpts().CPlusPlus << S.Current->getRange(PC: OpPC);
2188
2189 if (Ptr.isDummy())
2190 return false;
2191 if (Ptr.isFunctionPointer())
2192 return true;
2193
2194 if (Ptr.isBlockPointer() && !Ptr.isZero()) {
2195 // Only allow based lvalue casts if they are lossless.
2196 if (S.getASTContext().getTargetInfo().getPointerWidth(AddrSpace: LangAS::Default) !=
2197 BitWidth)
2198 return Invalid(S, OpPC);
2199 }
2200 return true;
2201}
2202
2203bool CastPointerIntegralAP(InterpState &S, CodePtr OpPC, uint32_t BitWidth) {
2204 const Pointer &Ptr = S.Stk.pop<Pointer>();
2205
2206 if (!CheckPointerToIntegralCast(S, OpPC, Ptr, BitWidth))
2207 return false;
2208
2209 auto Result = S.allocAP<IntegralAP<false>>(BitWidth);
2210 Result.copy(V: APInt(BitWidth, Ptr.getIntegerRepresentation()));
2211
2212 S.Stk.push<IntegralAP<false>>(Args&: Result);
2213 return true;
2214}
2215
2216bool CastPointerIntegralAPS(InterpState &S, CodePtr OpPC, uint32_t BitWidth) {
2217 const Pointer &Ptr = S.Stk.pop<Pointer>();
2218
2219 if (!CheckPointerToIntegralCast(S, OpPC, Ptr, BitWidth))
2220 return false;
2221
2222 auto Result = S.allocAP<IntegralAP<true>>(BitWidth);
2223 Result.copy(V: APInt(BitWidth, Ptr.getIntegerRepresentation()));
2224
2225 S.Stk.push<IntegralAP<true>>(Args&: Result);
2226 return true;
2227}
2228
2229bool CheckBitCast(InterpState &S, CodePtr OpPC, bool HasIndeterminateBits,
2230 bool TargetIsUCharOrByte) {
2231 // This is always fine.
2232 if (!HasIndeterminateBits)
2233 return true;
2234
2235 // Indeterminate bits can only be bitcast to unsigned char or std::byte.
2236 if (TargetIsUCharOrByte)
2237 return true;
2238
2239 const Expr *E = S.Current->getExpr(PC: OpPC);
2240 QualType ExprType = E->getType();
2241 S.FFDiag(E, DiagId: diag::note_constexpr_bit_cast_indet_dest)
2242 << ExprType << S.getLangOpts().CharIsSigned << E->getSourceRange();
2243 return false;
2244}
2245
/// Pushes a pointer representing the std::type_info object for \p TypePtr,
/// with \p TypeInfoType as the type of the typeid expression itself.
bool GetTypeid(InterpState &S, CodePtr OpPC, const Type *TypePtr,
               const Type *TypeInfoType) {
  S.Stk.push<Pointer>(Args&: TypePtr, Args&: TypeInfoType);
  return true;
}
2251
/// Implements typeid() on a polymorphic operand: pops the operand pointer
/// and pushes a type_info pointer for its dynamic type.
bool GetTypeidPtr(InterpState &S, CodePtr OpPC, const Type *TypeInfoType) {
  const auto &P = S.Stk.pop<Pointer>();

  if (!P.isBlockPointer())
    return false;

  // Pick the most-derived type.
  CanQualType T = P.getDeclPtr().getType()->getCanonicalTypeUnqualified();
  // ... unless we're currently constructing this object.
  // FIXME: We have a similar check to this in more places.
  if (S.Current->getFunction()) {
    // Walk the call stack: inside a constructor/destructor of this object,
    // the dynamic type is that constructor/destructor's class.
    for (const InterpFrame *Frame = S.Current; Frame; Frame = Frame->Caller) {
      if (const Function *Func = Frame->getFunction();
          Func && (Func->isConstructor() || Func->isDestructor()) &&
          P.block() == Frame->getThis().block()) {
        T = S.getContext().getASTContext().getCanonicalTagType(
            TD: Func->getParentDecl());
        break;
      }
    }
  }

  S.Stk.push<Pointer>(Args: T->getTypePtr(), Args&: TypeInfoType);
  return true;
}
2277
2278bool DiagTypeid(InterpState &S, CodePtr OpPC) {
2279 const auto *E = cast<CXXTypeidExpr>(Val: S.Current->getExpr(PC: OpPC));
2280 S.CCEDiag(E, DiagId: diag::note_constexpr_typeid_polymorphic)
2281 << E->getExprOperand()->getType()
2282 << E->getExprOperand()->getSourceRange();
2283 return false;
2284}
2285
/// Returns true if the two string-literal pointers could refer to
/// overlapping storage: after aligning the two strings at their current
/// offsets, the shorter one (including its null terminator) must match the
/// corresponding portion of the longer one.
bool arePotentiallyOverlappingStringLiterals(const Pointer &LHS,
                                             const Pointer &RHS) {
  unsigned LHSOffset = LHS.isOnePastEnd() ? LHS.getNumElems() : LHS.getIndex();
  unsigned RHSOffset = RHS.isOnePastEnd() ? RHS.getNumElems() : RHS.getIndex();
  // Lengths in bytes, excluding the null terminator element.
  unsigned LHSLength = (LHS.getNumElems() - 1) * LHS.elemSize();
  unsigned RHSLength = (RHS.getNumElems() - 1) * RHS.elemSize();

  StringRef LHSStr((const char *)LHS.atIndex(Idx: 0).getRawAddress(), LHSLength);
  StringRef RHSStr((const char *)RHS.atIndex(Idx: 0).getRawAddress(), RHSLength);
  // Drop the leading portion of whichever string starts earlier, so both
  // views begin at the same logical position.
  int32_t IndexDiff = RHSOffset - LHSOffset;
  if (IndexDiff < 0) {
    if (static_cast<int32_t>(LHSLength) < -IndexDiff)
      return false;
    LHSStr = LHSStr.drop_front(N: -IndexDiff);
  } else {
    if (static_cast<int32_t>(RHSLength) < IndexDiff)
      return false;
    RHSStr = RHSStr.drop_front(N: IndexDiff);
  }

  unsigned ShorterCharWidth;
  StringRef Shorter;
  StringRef Longer;
  if (LHSLength < RHSLength) {
    ShorterCharWidth = LHS.elemSize();
    Shorter = LHSStr;
    Longer = RHSStr;
  } else {
    ShorterCharWidth = RHS.elemSize();
    Shorter = RHSStr;
    Longer = LHSStr;
  }

  // The null terminator isn't included in the string data, so check for it
  // manually. If the longer string doesn't have a null terminator where the
  // shorter string ends, they aren't potentially overlapping.
  for (unsigned NullByte : llvm::seq(Size: ShorterCharWidth)) {
    if (Shorter.size() + NullByte >= Longer.size())
      break;
    if (Longer[Shorter.size() + NullByte])
      return false;
  }
  return Shorter == Longer.take_front(N: Shorter.size());
}
2330
2331static void copyPrimitiveMemory(InterpState &S, const Pointer &Ptr,
2332 PrimType T) {
2333 if (T == PT_IntAPS) {
2334 auto &Val = Ptr.deref<IntegralAP<true>>();
2335 if (!Val.singleWord()) {
2336 uint64_t *NewMemory = new (S.P) uint64_t[Val.numWords()];
2337 Val.take(NewMemory);
2338 }
2339 } else if (T == PT_IntAP) {
2340 auto &Val = Ptr.deref<IntegralAP<false>>();
2341 if (!Val.singleWord()) {
2342 uint64_t *NewMemory = new (S.P) uint64_t[Val.numWords()];
2343 Val.take(NewMemory);
2344 }
2345 } else if (T == PT_Float) {
2346 auto &Val = Ptr.deref<Floating>();
2347 if (!Val.singleWord()) {
2348 uint64_t *NewMemory = new (S.P) uint64_t[Val.numWords()];
2349 Val.take(NewMemory);
2350 }
2351 } else if (T == PT_MemberPtr) {
2352 auto &Val = Ptr.deref<MemberPointer>();
2353 unsigned PathLength = Val.getPathLength();
2354 auto *NewPath = new (S.P) const CXXRecordDecl *[PathLength];
2355 std::copy_n(first: Val.path(), n: PathLength, result: NewPath);
2356 Val.takePath(NewPath);
2357 }
2358}
2359
/// Moves any heap-backed payload of the primitive value at \p Ptr into
/// memory allocated from the Program allocator (S.P). Only instantiable
/// for types that may allocate (see needsAlloc<T>()).
template <typename T>
static void copyPrimitiveMemory(InterpState &S, const Pointer &Ptr) {
  assert(needsAlloc<T>());
  if constexpr (std::is_same_v<T, MemberPointer>) {
    // Member pointers own a path of CXXRecordDecls.
    auto &Val = Ptr.deref<MemberPointer>();
    unsigned PathLength = Val.getPathLength();
    auto *NewPath = new (S.P) const CXXRecordDecl *[PathLength];
    std::copy_n(first: Val.path(), n: PathLength, result: NewPath);
    Val.takePath(NewPath);
  } else {
    // IntegralAP/Floating: only multi-word values own heap memory.
    auto &Val = Ptr.deref<T>();
    if (!Val.singleWord()) {
      uint64_t *NewMemory = new (S.P) uint64_t[Val.numWords()];
      Val.take(NewMemory);
    }
  }
}
2377
/// Recursively walks a global's value and re-allocates any heap-backed
/// primitive payloads (IntegralAP, Floating, MemberPointer paths) into the
/// Program allocator via copyPrimitiveMemory().
static void finishGlobalRecurse(InterpState &S, const Pointer &Ptr) {
  if (const Record *R = Ptr.getRecord()) {
    for (const Record::Field &Fi : R->fields()) {
      if (Fi.Desc->isPrimitive()) {
        TYPE_SWITCH_ALLOC(Fi.Desc->getPrimType(), {
          copyPrimitiveMemory<T>(S, Ptr.atField(Fi.Offset));
        });
      } else {
        finishGlobalRecurse(S, Ptr: Ptr.atField(Off: Fi.Offset));
      }
    }
    return;
  }

  if (const Descriptor *D = Ptr.getFieldDesc(); D && D->isArray()) {
    unsigned NumElems = D->getNumElems();
    if (NumElems == 0)
      return;

    if (D->isPrimitiveArray()) {
      PrimType PT = D->getPrimType();
      // Only prim types that may heap-allocate need fixing up.
      if (!needsAlloc(T: PT))
        return;
      assert(NumElems >= 1);
      // Probe the first element; if it is single-word, skip the
      // per-element loop entirely.
      const Pointer EP = Ptr.atIndex(Idx: 0);
      bool AllSingleWord = true;
      TYPE_SWITCH_ALLOC(PT, {
        if (!EP.deref<T>().singleWord()) {
          copyPrimitiveMemory<T>(S, EP);
          AllSingleWord = false;
        }
      });
      if (AllSingleWord)
        return;
      for (unsigned I = 1; I != D->getNumElems(); ++I) {
        const Pointer EP = Ptr.atIndex(Idx: I);
        copyPrimitiveMemory(S, Ptr: EP, T: PT);
      }
    } else {
      assert(D->isCompositeArray());
      for (unsigned I = 0; I != D->getNumElems(); ++I) {
        const Pointer EP = Ptr.atIndex(Idx: I).narrow();
        finishGlobalRecurse(S, Ptr: EP);
      }
    }
  }
}
2425
2426bool FinishInitGlobal(InterpState &S, CodePtr OpPC) {
2427 const Pointer &Ptr = S.Stk.pop<Pointer>();
2428
2429 finishGlobalRecurse(S, Ptr);
2430 if (Ptr.canBeInitialized()) {
2431 Ptr.initialize();
2432 Ptr.activate();
2433 }
2434
2435 return true;
2436}
2437
/// Diagnoses an invalid cast in constant evaluation. Returns whether
/// interpretation may continue, depending on the cast kind and \p Fatal.
bool InvalidCast(InterpState &S, CodePtr OpPC, CastKind Kind, bool Fatal) {
  const SourceLocation &Loc = S.Current->getLocation(PC: OpPC);

  switch (Kind) {
  case CastKind::Reinterpret:
    S.CCEDiag(Loc, DiagId: diag::note_constexpr_invalid_cast)
        << diag::ConstexprInvalidCastKind::Reinterpret
        << S.Current->getRange(PC: OpPC);
    return !Fatal;
  case CastKind::ReinterpretLike:
    S.CCEDiag(Loc, DiagId: diag::note_constexpr_invalid_cast)
        << diag::ConstexprInvalidCastKind::ThisConversionOrReinterpret
        << S.getLangOpts().CPlusPlus << S.Current->getRange(PC: OpPC);
    return !Fatal;
  case CastKind::Volatile:
    // Volatile accesses always terminate evaluation.
    if (!S.checkingPotentialConstantExpression()) {
      const auto *E = cast<CastExpr>(Val: S.Current->getExpr(PC: OpPC));
      if (S.getLangOpts().CPlusPlus)
        S.FFDiag(E, DiagId: diag::note_constexpr_access_volatile_type)
            << AK_Read << E->getSubExpr()->getType();
      else
        S.FFDiag(E);
    }

    return false;
  case CastKind::Dynamic:
    // dynamic_cast is a note only pre-C++20; evaluation continues.
    assert(!S.getLangOpts().CPlusPlus20);
    S.CCEDiag(Loc, DiagId: diag::note_constexpr_invalid_cast)
        << diag::ConstexprInvalidCastKind::Dynamic;
    return true;
  }
  llvm_unreachable("Unhandled CastKind");
  // Not reached; presumably kept to satisfy compilers that still require a
  // return after llvm_unreachable.
  return false;
}
2472
/// Runs cleanup for all initialized locals of scope \p I in the current
/// frame, in reverse declaration order. Fails if any local's lifetime has
/// already been explicitly ended.
bool Destroy(InterpState &S, CodePtr OpPC, uint32_t I) {
  assert(S.Current->getFunction());
  // FIXME: We iterate the scope once here and then again in the destroy() call
  // below.
  for (auto &Local : S.Current->getFunction()->getScope(Idx: I).locals_reverse()) {
    // Skip locals that were never initialized.
    if (!S.Current->getLocalBlock(Offset: Local.Offset)->isInitialized())
      continue;
    const Pointer &Ptr = S.Current->getLocalPointer(Offset: Local.Offset);
    if (Ptr.getLifetime() == Lifetime::Ended) {
      // Try to use the declaration for better diagnostics
      if (const Decl *D = Ptr.getDeclDesc()->asDecl()) {
        auto *ND = cast<NamedDecl>(Val: D);
        S.FFDiag(Loc: ND->getLocation(),
                 DiagId: diag::note_constexpr_destroy_out_of_lifetime)
            << ND->getNameAsString();
      } else {
        S.FFDiag(Loc: Ptr.getDeclDesc()->getLocation(),
                 DiagId: diag::note_constexpr_destroy_out_of_lifetime)
            << Ptr.toDiagnosticString(Ctx: S.getASTContext());
      }
      return false;
    }
  }

  S.Current->destroy(Idx: I);
  return true;
}
2500
// Perform a cast towards the class of the Decl (either up or down the
// hierarchy). Shrinks the member pointer's path by one entry; the entry
// being removed must correspond to \p BaseDecl.
static bool castBackMemberPointer(InterpState &S,
                                  const MemberPointer &MemberPtr,
                                  int32_t BaseOffset,
                                  const RecordDecl *BaseDecl) {
  // The class we would land in after dropping the last path entry.
  const CXXRecordDecl *Expected;
  if (MemberPtr.getPathLength() >= 2)
    Expected = MemberPtr.getPathEntry(Index: MemberPtr.getPathLength() - 2);
  else
    Expected = MemberPtr.getRecordDecl();

  assert(Expected);
  if (Expected->getCanonicalDecl() != BaseDecl->getCanonicalDecl()) {
    // C++11 [expr.static.cast]p12: In a conversion from (D::*) to (B::*),
    // if B does not contain the original member and is not a base or
    // derived class of the class containing the original member, the result
    // of the cast is undefined.
    // C++11 [conv.mem]p2 does not cover this case for a cast from (B::*) to
    // (D::*). We consider that to be a language defect.
    return false;
  }

  unsigned OldPathLength = MemberPtr.getPathLength();
  unsigned NewPathLength = OldPathLength - 1;
  // An empty path means the pointer is back at its original class.
  bool IsDerivedMember = NewPathLength != 0;
  auto NewPath = S.allocMemberPointerPath(Length: NewPathLength);
  std::copy_n(first: MemberPtr.path(), n: NewPathLength, result: NewPath);

  S.Stk.push<MemberPointer>(Args: MemberPtr.atInstanceBase(Offset: BaseOffset, PathLength: NewPathLength,
                                                  Path: NewPath, NewIsDerived: IsDerivedMember));
  return true;
}
2534
2535static bool appendToMemberPointer(InterpState &S,
2536 const MemberPointer &MemberPtr,
2537 int32_t BaseOffset,
2538 const RecordDecl *BaseDecl,
2539 bool IsDerivedMember) {
2540 unsigned OldPathLength = MemberPtr.getPathLength();
2541 unsigned NewPathLength = OldPathLength + 1;
2542
2543 auto NewPath = S.allocMemberPointerPath(Length: NewPathLength);
2544 std::copy_n(first: MemberPtr.path(), n: OldPathLength, result: NewPath);
2545 NewPath[OldPathLength] = cast<CXXRecordDecl>(Val: BaseDecl);
2546
2547 S.Stk.push<MemberPointer>(Args: MemberPtr.atInstanceBase(Offset: BaseOffset, PathLength: NewPathLength,
2548 Path: NewPath, NewIsDerived: IsDerivedMember));
2549 return true;
2550}
2551
2552/// DerivedToBaseMemberPointer
2553bool CastMemberPtrBasePop(InterpState &S, CodePtr OpPC, int32_t Off,
2554 const RecordDecl *BaseDecl) {
2555 const auto &Ptr = S.Stk.pop<MemberPointer>();
2556
2557 if (!Ptr.isDerivedMember() && Ptr.hasPath())
2558 return castBackMemberPointer(S, MemberPtr: Ptr, BaseOffset: Off, BaseDecl);
2559
2560 bool IsDerivedMember = Ptr.isDerivedMember() || !Ptr.hasPath();
2561 return appendToMemberPointer(S, MemberPtr: Ptr, BaseOffset: Off, BaseDecl, IsDerivedMember);
2562}
2563
2564/// BaseToDerivedMemberPointer
2565bool CastMemberPtrDerivedPop(InterpState &S, CodePtr OpPC, int32_t Off,
2566 const RecordDecl *BaseDecl) {
2567 const auto &Ptr = S.Stk.pop<MemberPointer>();
2568
2569 if (!Ptr.isDerivedMember()) {
2570 // Simply append.
2571 return appendToMemberPointer(S, MemberPtr: Ptr, BaseOffset: Off, BaseDecl,
2572 /*IsDerivedMember=*/false);
2573 }
2574
2575 return castBackMemberPointer(S, MemberPtr: Ptr, BaseOffset: Off, BaseDecl);
2576}
2577
2578// https://github.com/llvm/llvm-project/issues/102513
2579#if defined(_MSC_VER) && !defined(__clang__) && !defined(NDEBUG)
2580#pragma optimize("", off)
2581#endif
/// Main interpreter loop: reads opcodes starting at the current frame's
/// program counter and dispatches to the generated handlers (Opcodes.inc)
/// until one of them terminates interpretation.
bool Interpret(InterpState &S) {
  // The current stack frame when we started Interpret().
  // This is being used by the ops to determine whether
  // to return from this function and thus terminate
  // interpretation.
  const InterpFrame *StartFrame = S.Current;
  assert(!S.Current->isRoot());
  CodePtr PC = S.Current->getPC();

  // Empty program.
  if (!PC)
    return true;

  for (;;) {
    auto Op = PC.read<Opcode>();
    CodePtr OpPC = PC;

    switch (Op) {
#define GET_INTERP
#include "Opcodes.inc"
#undef GET_INTERP
    }
  }
}
2606// https://github.com/llvm/llvm-project/issues/102513
2607#if defined(_MSC_VER) && !defined(__clang__) && !defined(NDEBUG)
2608#pragma optimize("", on)
2609#endif
2610
2611} // namespace interp
2612} // namespace clang
2613