1//===------- Interp.cpp - Interpreter for the constexpr VM ------*- C++ -*-===//
2//
3// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
4// See https://llvm.org/LICENSE.txt for license information.
5// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
6//
7//===----------------------------------------------------------------------===//
8
9#include "Interp.h"
10#include "Compiler.h"
11#include "Function.h"
12#include "InterpFrame.h"
13#include "InterpShared.h"
14#include "InterpStack.h"
15#include "Opcode.h"
16#include "PrimType.h"
17#include "Program.h"
18#include "State.h"
19#include "clang/AST/ASTContext.h"
20#include "clang/AST/CXXInheritance.h"
21#include "clang/AST/DeclObjC.h"
22#include "clang/AST/Expr.h"
23#include "clang/AST/ExprCXX.h"
24#include "clang/Basic/DiagnosticSema.h"
25#include "clang/Basic/TargetInfo.h"
26#include "llvm/ADT/StringExtras.h"
27
28using namespace clang;
29using namespace clang::interp;
30
31static bool RetValue(InterpState &S, CodePtr &Pt) {
32 llvm::report_fatal_error(reason: "Interpreter cannot return values");
33}
34
35//===----------------------------------------------------------------------===//
36// Jmp, Jt, Jf
37//===----------------------------------------------------------------------===//
38
39static bool Jmp(InterpState &S, CodePtr &PC, int32_t Offset) {
40 PC += Offset;
41 return true;
42}
43
44static bool Jt(InterpState &S, CodePtr &PC, int32_t Offset) {
45 if (S.Stk.pop<bool>()) {
46 PC += Offset;
47 }
48 return true;
49}
50
51static bool Jf(InterpState &S, CodePtr &PC, int32_t Offset) {
52 if (!S.Stk.pop<bool>()) {
53 PC += Offset;
54 }
55 return true;
56}
57
// https://github.com/llvm/llvm-project/issues/102513
#if defined(_MSC_VER) && !defined(__clang__) && !defined(NDEBUG)
#pragma optimize("", off)
#endif
// FIXME: We have the large switch over all opcodes here again, and in
// Interpret().
//
// Speculative interpretation for __builtin_constant_p-style queries:
// interpret the opcodes between here and the matching OP_EndSpeculation
// without committing to the outcome. On success, the speculated result is
// popped and replaced by a 32-bit 1 (for pointers: the CheckBCPResult
// verdict); on failure inside a constant context, the stack is rolled back
// and a 32-bit 0 is pushed instead. Finally RealPC is advanced to the
// EndSpeculation label.
static bool BCP(InterpState &S, CodePtr &RealPC, int32_t Offset, PrimType PT) {
  [[maybe_unused]] CodePtr PCBefore = RealPC;
  size_t StackSizeBefore = S.Stk.size();

  // Runs the embedded opcode stream on a *local* PC copy so RealPC is
  // untouched regardless of the outcome.
  auto SpeculativeInterp = [&S, RealPC]() -> bool {
    const InterpFrame *StartFrame = S.Current;
    CodePtr PC = RealPC;

    for (;;) {
      auto Op = PC.read<Opcode>();
      if (Op == OP_EndSpeculation)
        return true;
      CodePtr OpPC = PC;

      switch (Op) {
#define GET_INTERP
#include "Opcodes.inc"
#undef GET_INTERP
      }
    }
    llvm_unreachable("We didn't see an EndSpeculation op?");
  };

  if (SpeculativeInterp()) {
    if (PT == PT_Ptr) {
      const auto &Ptr = S.Stk.pop<Pointer>();
      assert(S.Stk.size() == StackSizeBefore);
      S.Stk.push<Integral<32, true>>(
          Args: Integral<32, true>::from(Value: CheckBCPResult(S, Ptr)));
    } else {
      // Pop the result from the stack and return success.
      TYPE_SWITCH(PT, S.Stk.pop<T>(););
      assert(S.Stk.size() == StackSizeBefore);
      S.Stk.push<Integral<32, true>>(Args: Integral<32, true>::from(Value: 1));
    }
  } else {
    // Speculation failed. Outside of a constant context, that is a hard
    // error; otherwise discard whatever the speculation left behind and
    // report "not a constant".
    if (!S.inConstantContext())
      return Invalid(S, OpPC: RealPC);

    S.Stk.clearTo(NewSize: StackSizeBefore);
    S.Stk.push<Integral<32, true>>(Args: Integral<32, true>::from(Value: 0));
  }

  // RealPC should not have been modified.
  assert(*RealPC == *PCBefore);

  // Jump to end label. This is a little tricker than just RealPC += Offset
  // because our usual jump instructions don't have any arguments, to the offset
  // we get is a little too much and we need to subtract the size of the
  // bool and PrimType arguments again.
  int32_t ParamSize = align(Size: sizeof(PrimType));
  assert(Offset >= ParamSize);
  RealPC += Offset - ParamSize;

  [[maybe_unused]] CodePtr PCCopy = RealPC;
  assert(PCCopy.read<Opcode>() == OP_EndSpeculation);

  return true;
}
// https://github.com/llvm/llvm-project/issues/102513
#if defined(_MSC_VER) && !defined(__clang__) && !defined(NDEBUG)
#pragma optimize("", on)
#endif
127
128static void diagnoseMissingInitializer(InterpState &S, CodePtr OpPC,
129 const ValueDecl *VD) {
130 const SourceInfo &E = S.Current->getSource(PC: OpPC);
131 S.FFDiag(SI: E, DiagId: diag::note_constexpr_var_init_unknown, ExtraNotes: 1) << VD;
132 S.Note(Loc: VD->getLocation(), DiagId: diag::note_declared_at) << VD->getSourceRange();
133}
134
135static void diagnoseNonConstVariable(InterpState &S, CodePtr OpPC,
136 const ValueDecl *VD);
/// Diagnose an access to a declaration whose value the interpreter does not
/// know (e.g. a parameter outside a call, or a non-constexpr variable).
/// Always returns false.
static bool diagnoseUnknownDecl(InterpState &S, CodePtr OpPC,
                                const ValueDecl *D) {
  // This function tries pretty hard to produce a good diagnostic. Just skip
  // that if nobody will see it anyway.
  if (!S.diagnosing())
    return false;

  if (isa<ParmVarDecl>(Val: D)) {
    // Reference parameters in pre-C++11 constant contexts get the generic
    // non-const-variable diagnostic instead of the parameter-specific one.
    if (D->getType()->isReferenceType()) {
      if (S.inConstantContext() && S.getLangOpts().CPlusPlus &&
          !S.getLangOpts().CPlusPlus11) {
        diagnoseNonConstVariable(S, OpPC, VD: D);
        return false;
      }
    }

    const SourceInfo &Loc = S.Current->getSource(PC: OpPC);
    if (S.getLangOpts().CPlusPlus23 && D->getType()->isReferenceType()) {
      S.FFDiag(SI: Loc, DiagId: diag::note_constexpr_access_unknown_variable, ExtraNotes: 1)
          << AK_Read << D;
      S.Note(Loc: D->getLocation(), DiagId: diag::note_declared_at) << D->getSourceRange();
    } else if (S.getLangOpts().CPlusPlus11) {
      S.FFDiag(SI: Loc, DiagId: diag::note_constexpr_function_param_value_unknown, ExtraNotes: 1) << D;
      S.Note(Loc: D->getLocation(), DiagId: diag::note_declared_at) << D->getSourceRange();
    } else {
      // No specific note text available for this case; emit the generic one.
      S.FFDiag(SI: Loc);
    }
    return false;
  }

  if (!D->getType().isConstQualified()) {
    diagnoseNonConstVariable(S, OpPC, VD: D);
  } else if (const auto *VD = dyn_cast<VarDecl>(Val: D)) {
    if (!VD->getAnyInitializer()) {
      diagnoseMissingInitializer(S, OpPC, VD);
    } else {
      // Const and initialized, but the initializer was not a constant
      // expression.
      const SourceInfo &Loc = S.Current->getSource(PC: OpPC);
      S.FFDiag(SI: Loc, DiagId: diag::note_constexpr_var_init_non_constant, ExtraNotes: 1) << VD;
      S.Note(Loc: VD->getLocation(), DiagId: diag::note_declared_at);
    }
  }

  return false;
}
181
/// Emit the appropriate "read of non-const(expr) variable" note for \p VD,
/// choosing the wording based on language mode and the variable's type.
static void diagnoseNonConstVariable(InterpState &S, CodePtr OpPC,
                                     const ValueDecl *VD) {
  if (!S.diagnosing())
    return;

  const SourceInfo &Loc = S.Current->getSource(PC: OpPC);
  // C has no specific note text; emit the generic diagnostic.
  if (!S.getLangOpts().CPlusPlus) {
    S.FFDiag(SI: Loc);
    return;
  }

  // A const variable without any initializer is better reported as
  // "missing initializer" than as "non-const".
  if (const auto *VarD = dyn_cast<VarDecl>(Val: VD);
      VarD && VarD->getType().isConstQualified() &&
      !VarD->getAnyInitializer()) {
    diagnoseMissingInitializer(S, OpPC, VD);
    return;
  }

  // Rather random, but this is to match the diagnostic output of the current
  // interpreter.
  if (isa<ObjCIvarDecl>(Val: VD))
    return;

  if (VD->getType()->isIntegralOrEnumerationType()) {
    S.FFDiag(SI: Loc, DiagId: diag::note_constexpr_ltor_non_const_int, ExtraNotes: 1) << VD;
    S.Note(Loc: VD->getLocation(), DiagId: diag::note_declared_at);
    return;
  }

  // Non-integral types: the note depends on whether 'constexpr' exists in
  // this language mode.
  S.FFDiag(SI: Loc,
           DiagId: S.getLangOpts().CPlusPlus11 ? diag::note_constexpr_ltor_non_constexpr
                                        : diag::note_constexpr_ltor_non_integral,
           ExtraNotes: 1)
      << VD << VD->getType();
  S.Note(Loc: VD->getLocation(), DiagId: diag::note_declared_at);
}
218
/// Check that a static, lifetime-extended temporary is usable in the current
/// evaluation; other blocks pass through unchanged.
static bool CheckTemporary(InterpState &S, CodePtr OpPC, const Block *B,
                           AccessKinds AK) {
  if (B->getDeclID()) {
    // Only static temporaries are of interest here.
    if (!(B->isStatic() && B->isTemporary()))
      return true;

    const auto *MTE = dyn_cast_if_present<MaterializeTemporaryExpr>(
        Val: B->getDescriptor()->asExpr());
    if (!MTE)
      return true;

    // FIXME(perf): Since we do this check on every Load from a static
    // temporary, it might make sense to cache the value of the
    // isUsableInConstantExpressions call.
    // A temporary created by an *earlier* evaluation is only accessible if
    // it is usable in constant expressions.
    if (B->getEvalID() != S.Ctx.getEvalID() &&
        !MTE->isUsableInConstantExpressions(Context: S.getASTContext())) {
      const SourceInfo &E = S.Current->getSource(PC: OpPC);
      S.FFDiag(SI: E, DiagId: diag::note_constexpr_access_static_temporary, ExtraNotes: 1) << AK;
      S.Note(Loc: B->getDescriptor()->getLocation(),
             DiagId: diag::note_constexpr_temporary_here);
      return false;
    }
  }
  return true;
}
244
245static bool CheckGlobal(InterpState &S, CodePtr OpPC, const Pointer &Ptr) {
246 if (auto ID = Ptr.getDeclID()) {
247 if (!Ptr.isStatic())
248 return true;
249
250 if (S.P.getCurrentDecl() == ID)
251 return true;
252
253 S.FFDiag(Loc: S.Current->getLocation(PC: OpPC), DiagId: diag::note_constexpr_modify_global);
254 return false;
255 }
256 return true;
257}
258
259namespace clang {
260namespace interp {
// Discard one call argument from the stack, using the argument expression's
// classified primitive type (composites fall back to PT_Ptr).
static void popArg(InterpState &S, const Expr *Arg) {
  PrimType Ty = S.getContext().classify(E: Arg).value_or(PT: PT_Ptr);
  TYPE_SWITCH(Ty, S.Stk.discard<T>());
}
265
/// Pop the callee's arguments off the stack after a call to \p Func.
/// Handles both the extra (variadic) arguments — whose types must be
/// recovered from the call expression in the caller — and the fixed
/// parameters, which are known from the Function itself.
void cleanupAfterFunctionCall(InterpState &S, CodePtr OpPC,
                              const Function *Func) {
  assert(S.Current);
  assert(Func);

  if (S.Current->Caller && Func->isVariadic()) {
    // CallExpr we're look for is at the return PC of the current function, i.e.
    // in the caller.
    // This code path should be executed very rarely.
    unsigned NumVarArgs;
    const Expr *const *Args = nullptr;
    unsigned NumArgs = 0;
    const Expr *CallSite = S.Current->Caller->getExpr(PC: S.Current->getRetPC());
    if (const auto *CE = dyn_cast<CallExpr>(Val: CallSite)) {
      Args = CE->getArgs();
      NumArgs = CE->getNumArgs();
    } else if (const auto *CE = dyn_cast<CXXConstructExpr>(Val: CallSite)) {
      Args = CE->getArgs();
      NumArgs = CE->getNumArgs();
    } else
      assert(false && "Can't get arguments from that expression type");

    assert(NumArgs >= Func->getNumWrittenParams());
    // For operator calls, the object argument is not a written parameter,
    // hence the extra adjustment.
    NumVarArgs = NumArgs - (Func->getNumWrittenParams() +
                            isa<CXXOperatorCallExpr>(Val: CallSite));
    // Variadic arguments sit on top of the stack, so pop them back-to-front.
    for (unsigned I = 0; I != NumVarArgs; ++I) {
      const Expr *A = Args[NumArgs - 1 - I];
      popArg(S, Arg: A);
    }
  }

  // And in any case, remove the fixed parameters (the non-variadic ones)
  // at the end.
  for (const Function::ParamDescriptor &PDesc : Func->args_reverse())
    TYPE_SWITCH(PDesc.T, S.Stk.discard<T>());
}
302
303bool isConstexprUnknown(const Pointer &P) {
304 if (!P.isBlockPointer())
305 return false;
306
307 if (P.isDummy())
308 return isa_and_nonnull<ParmVarDecl>(Val: P.getDeclDesc()->asValueDecl());
309
310 return P.getDeclDesc()->IsConstexprUnknown;
311}
312
/// Decide what __builtin_constant_p should return for a successfully
/// evaluated pointer result. The order of the checks below is significant:
/// dummies fail before the null check, and typed checks come last.
bool CheckBCPResult(InterpState &S, const Pointer &Ptr) {
  if (Ptr.isDummy())
    return false;
  if (Ptr.isZero())
    return true;
  if (Ptr.isFunctionPointer())
    return false;
  if (Ptr.isIntegralPointer())
    return true;
  if (Ptr.isTypeidPointer())
    return true;

  if (Ptr.getType()->isAnyComplexType())
    return true;

  // A pointer to the start of a string literal is considered constant.
  if (const Expr *Base = Ptr.getDeclDesc()->asExpr())
    return isa<StringLiteral>(Val: Base) && Ptr.getIndex() == 0;
  return false;
}
332
/// Check that \p Ptr designates the active member of every union on its
/// access path. If not (and the access is not an allowed activation),
/// diagnose a read/write of an inactive union member.
bool CheckActive(InterpState &S, CodePtr OpPC, const Pointer &Ptr,
                 AccessKinds AK, bool WillActivate) {
  if (Ptr.isActive())
    return true;

  assert(Ptr.inUnion());

  // Find the outermost union.
  // U walks up toward the root; C trails one step behind, so at the end C is
  // the direct (inactive) member of union U.
  Pointer U = Ptr.getBase();
  Pointer C = Ptr;
  while (!U.isRoot() && !U.isActive()) {
    // A little arbitrary, but this is what the current interpreter does.
    // See the AnonymousUnion test in test/AST/ByteCode/unions.cpp.
    // GCC's output is more similar to what we would get without
    // this condition.
    if (U.getRecord() && U.getRecord()->isAnonymousUnion())
      break;

    C = U;
    U = U.getBase();
  }
  assert(C.isField());
  assert(C.getBase() == U);

  // Consider:
  // union U {
  //   struct {
  //     int x;
  //     int y;
  //   } a;
  // }
  //
  // When activating x, we will also activate a. If we now try to read
  // from y, we will get to CheckActive, because y is not active. In that
  // case, our U will be a (not a union). We return here and let later code
  // handle this.
  if (!U.getFieldDesc()->isUnion())
    return true;

  // When we will activate Ptr, check that none of the unions in its path have a
  // non-trivial default constructor.
  if (WillActivate) {
    bool Fails = false;
    Pointer It = Ptr;
    while (!It.isRoot() && !It.isActive()) {
      if (const Record *R = It.getRecord(); R && R->isUnion()) {
        if (const auto *CXXRD = dyn_cast<CXXRecordDecl>(Val: R->getDecl());
            CXXRD && !CXXRD->hasTrivialDefaultConstructor()) {
          Fails = true;
          break;
        }
      }
      It = It.getBase();
    }
    // All unions on the path are trivially default-constructible: the
    // activation is permitted.
    if (!Fails)
      return true;
  }

  // Get the inactive field descriptor.
  assert(!C.isActive());
  const FieldDecl *InactiveField = C.getField();
  assert(InactiveField);

  // Find the active field of the union.
  const Record *R = U.getRecord();
  assert(R && R->isUnion() && "Not a union");

  const FieldDecl *ActiveField = nullptr;
  for (const Record::Field &F : R->fields()) {
    const Pointer &Field = U.atField(Off: F.Offset);
    if (Field.isActive()) {
      ActiveField = Field.getField();
      break;
    }
  }

  // Note: ActiveField may legitimately be null (no member is active); the
  // diagnostic selects its wording on !ActiveField.
  const SourceInfo &Loc = S.Current->getSource(PC: OpPC);
  S.FFDiag(SI: Loc, DiagId: diag::note_constexpr_access_inactive_union_member)
      << AK << InactiveField << !ActiveField << ActiveField;
  return false;
}
414
/// Check an access through a pointer to an extern variable. Such accesses
/// are only allowed when the value is actually known (initialized, or the
/// declaration currently being evaluated).
bool CheckExtern(InterpState &S, CodePtr OpPC, const Pointer &Ptr) {
  if (!Ptr.isExtern())
    return true;

  if (!Ptr.isPastEnd() &&
      (Ptr.isInitialized() ||
       (Ptr.getDeclDesc()->asVarDecl() == S.EvaluatingDecl)))
    return true;

  // While checking a potential constant expression, fail silently for a
  // const extern — it may well be constant at an actual evaluation.
  if (S.checkingPotentialConstantExpression() && S.getLangOpts().CPlusPlus &&
      Ptr.isConst())
    return false;

  const auto *VD = Ptr.getDeclDesc()->asValueDecl();
  diagnoseNonConstVariable(S, OpPC, VD);
  return false;
}
432
433bool CheckArray(InterpState &S, CodePtr OpPC, const Pointer &Ptr) {
434 if (!Ptr.isUnknownSizeArray())
435 return true;
436 const SourceInfo &E = S.Current->getSource(PC: OpPC);
437 S.FFDiag(SI: E, DiagId: diag::note_constexpr_unsized_array_indexed);
438 return false;
439}
440
/// Check that \p Ptr points at a live object: not null and not outside its
/// lifetime (deleted, or a dead temporary/local).
bool CheckLive(InterpState &S, CodePtr OpPC, const Pointer &Ptr,
               AccessKinds AK) {
  if (Ptr.isZero()) {
    const auto &Src = S.Current->getSource(PC: OpPC);

    // Field of a null pointer gets the "null subobject" wording instead of
    // the plain "access of null" one.
    if (Ptr.isField())
      S.FFDiag(SI: Src, DiagId: diag::note_constexpr_null_subobject) << CSK_Field;
    else
      S.FFDiag(SI: Src, DiagId: diag::note_constexpr_access_null) << AK;

    return false;
  }

  if (!Ptr.isLive()) {
    const auto &Src = S.Current->getSource(PC: OpPC);

    if (Ptr.isDynamic()) {
      S.FFDiag(SI: Src, DiagId: diag::note_constexpr_access_deleted_object) << AK;
    } else if (!S.checkingPotentialConstantExpression()) {
      // Object whose lifetime has ended; point at its declaration or the
      // temporary expression.
      bool IsTemp = Ptr.isTemporary();
      S.FFDiag(SI: Src, DiagId: diag::note_constexpr_access_uninit)
          << AK << /*uninitialized=*/false << S.Current->getRange(PC: OpPC);

      if (IsTemp)
        S.Note(Loc: Ptr.getDeclLoc(), DiagId: diag::note_constexpr_temporary_here);
      else
        S.Note(Loc: Ptr.getDeclLoc(), DiagId: diag::note_declared_at);
    }

    return false;
  }

  return true;
}
475
/// Check that the variable described by \p Desc may be read in a constant
/// expression: it must be constexpr, the declaration currently being
/// evaluated, or of a suitably const type for the language mode.
bool CheckConstant(InterpState &S, CodePtr OpPC, const Descriptor *Desc) {
  assert(Desc);

  const auto *D = Desc->asVarDecl();
  if (!D || D == S.EvaluatingDecl || D->isConstexpr())
    return true;

  // If we're evaluating the initializer for a constexpr variable in C23, we may
  // only read other contexpr variables. Abort here since this one isn't
  // constexpr.
  if (const auto *VD = dyn_cast_if_present<VarDecl>(Val: S.EvaluatingDecl);
      VD && VD->isConstexpr() && S.getLangOpts().C23)
    return Invalid(S, OpPC);

  QualType T = D->getType();
  bool IsConstant = T.isConstant(Ctx: S.getASTContext());
  // Integral/enum: const is good enough; anything else is an error.
  if (T->isIntegralOrEnumerationType()) {
    if (!IsConstant) {
      diagnoseNonConstVariable(S, OpPC, VD: D);
      return false;
    }
    return true;
  }

  // Non-integral but const: allowed, with a CCE diagnostic (warning-level,
  // the access itself still succeeds).
  if (IsConstant) {
    if (S.getLangOpts().CPlusPlus) {
      S.CCEDiag(Loc: S.Current->getLocation(PC: OpPC),
                DiagId: S.getLangOpts().CPlusPlus11
                    ? diag::note_constexpr_ltor_non_constexpr
                    : diag::note_constexpr_ltor_non_integral,
                ExtraNotes: 1)
          << D << T;
      S.Note(Loc: D->getLocation(), DiagId: diag::note_declared_at);
    } else {
      S.CCEDiag(Loc: S.Current->getLocation(PC: OpPC));
    }
    return true;
  }

  // Pointers/references to const pointees are allowed in C++11 and later.
  if (T->isPointerOrReferenceType()) {
    if (!T->getPointeeType().isConstant(Ctx: S.getASTContext()) ||
        !S.getLangOpts().CPlusPlus11) {
      diagnoseNonConstVariable(S, OpPC, VD: D);
      return false;
    }
    return true;
  }

  diagnoseNonConstVariable(S, OpPC, VD: D);
  return false;
}
527
528static bool CheckConstant(InterpState &S, CodePtr OpPC, const Pointer &Ptr) {
529 if (!Ptr.isStatic() || !Ptr.isBlockPointer())
530 return true;
531 if (!Ptr.getDeclID())
532 return true;
533 return CheckConstant(S, OpPC, Desc: Ptr.getDeclDesc());
534}
535
536bool CheckNull(InterpState &S, CodePtr OpPC, const Pointer &Ptr,
537 CheckSubobjectKind CSK) {
538 if (!Ptr.isZero())
539 return true;
540 const SourceInfo &Loc = S.Current->getSource(PC: OpPC);
541 S.FFDiag(SI: Loc, DiagId: diag::note_constexpr_null_subobject)
542 << CSK << S.Current->getRange(PC: OpPC);
543
544 return false;
545}
546
547bool CheckRange(InterpState &S, CodePtr OpPC, const Pointer &Ptr,
548 AccessKinds AK) {
549 if (!Ptr.isOnePastEnd() && !Ptr.isZeroSizeArray())
550 return true;
551 if (S.getLangOpts().CPlusPlus) {
552 const SourceInfo &Loc = S.Current->getSource(PC: OpPC);
553 S.FFDiag(SI: Loc, DiagId: diag::note_constexpr_access_past_end)
554 << AK << S.Current->getRange(PC: OpPC);
555 }
556 return false;
557}
558
559bool CheckRange(InterpState &S, CodePtr OpPC, const Pointer &Ptr,
560 CheckSubobjectKind CSK) {
561 if (!Ptr.isElementPastEnd() && !Ptr.isZeroSizeArray())
562 return true;
563 const SourceInfo &Loc = S.Current->getSource(PC: OpPC);
564 S.FFDiag(SI: Loc, DiagId: diag::note_constexpr_past_end_subobject)
565 << CSK << S.Current->getRange(PC: OpPC);
566 return false;
567}
568
569bool CheckSubobject(InterpState &S, CodePtr OpPC, const Pointer &Ptr,
570 CheckSubobjectKind CSK) {
571 if (!Ptr.isOnePastEnd())
572 return true;
573
574 const SourceInfo &Loc = S.Current->getSource(PC: OpPC);
575 S.FFDiag(SI: Loc, DiagId: diag::note_constexpr_past_end_subobject)
576 << CSK << S.Current->getRange(PC: OpPC);
577 return false;
578}
579
/// Check that casting \p Ptr down by \p Offset bytes stays within the
/// allocation (past the descriptor's metadata). Otherwise diagnose an
/// invalid downcast.
bool CheckDowncast(InterpState &S, CodePtr OpPC, const Pointer &Ptr,
                   uint32_t Offset) {
  uint32_t MinOffset = Ptr.getDeclDesc()->getMetadataSize();
  uint32_t PtrOffset = Ptr.getByteOffset();

  // We subtract Offset from PtrOffset. The result must be at least
  // MinOffset.
  if (Offset < PtrOffset && (PtrOffset - Offset) >= MinOffset)
    return true;

  // Report the cast with the most-derived type of the object and the
  // attempted target type.
  const auto *E = cast<CastExpr>(Val: S.Current->getExpr(PC: OpPC));
  QualType TargetQT = E->getType()->getPointeeType();
  QualType MostDerivedQT = Ptr.getDeclPtr().getType();

  S.CCEDiag(E, DiagId: diag::note_constexpr_invalid_downcast)
      << MostDerivedQT << TargetQT;

  return false;
}
599
/// Check that a const object is not being modified, with exceptions for
/// mutable members and objects currently under construction/destruction.
bool CheckConst(InterpState &S, CodePtr OpPC, const Pointer &Ptr) {
  assert(Ptr.isLive() && "Pointer is not live");
  if (!Ptr.isConst())
    return true;

  // A mutable member is writable unless its constness comes from within the
  // mutable subobject itself.
  if (Ptr.isMutable() && !Ptr.isConstInMutable())
    return true;

  // Non-block const pointers fail silently.
  if (!Ptr.isBlockPointer())
    return false;

  // The This pointer is writable in constructors and destructors,
  // even if isConst() returns true.
  if (llvm::is_contained(Range&: S.InitializingBlocks, Element: Ptr.block()))
    return true;

  const QualType Ty = Ptr.getType();
  const SourceInfo &Loc = S.Current->getSource(PC: OpPC);
  S.FFDiag(SI: Loc, DiagId: diag::note_constexpr_modify_const_type) << Ty;
  return false;
}
621
/// Check a read of a mutable member. Allowed from C++14 on when the object's
/// lifetime began within this evaluation; otherwise diagnosed.
bool CheckMutable(InterpState &S, CodePtr OpPC, const Pointer &Ptr) {
  assert(Ptr.isLive() && "Pointer is not live");
  if (!Ptr.isMutable())
    return true;

  // In C++14 onwards, it is permitted to read a mutable member whose
  // lifetime began within the evaluation.
  if (S.getLangOpts().CPlusPlus14 &&
      Ptr.block()->getEvalID() == S.Ctx.getEvalID()) {
    // FIXME: This check is necessary because (of the way) we revisit
    // variables in Compiler.cpp:visitDeclRef. Revisiting a so far
    // unknown variable will get the same EvalID and we end up allowing
    // reads from mutable members of it.
    if (!S.inConstantContext() && isConstexprUnknown(P: Ptr))
      return false;
    return true;
  }

  const SourceInfo &Loc = S.Current->getSource(PC: OpPC);
  const FieldDecl *Field = Ptr.getField();
  S.FFDiag(SI: Loc, DiagId: diag::note_constexpr_access_mutable, ExtraNotes: 1) << AK_Read << Field;
  S.Note(Loc: Field->getLocation(), DiagId: diag::note_declared_at);
  return false;
}
646
/// Check an access to a volatile object. In C this is simply invalid; in
/// C++ it is diagnosed with a note pointing at whichever enclosing entity
/// introduced the volatile qualifier.
static bool CheckVolatile(InterpState &S, CodePtr OpPC, const Pointer &Ptr,
                          AccessKinds AK) {
  assert(Ptr.isLive());

  if (!Ptr.isVolatile())
    return true;

  if (!S.getLangOpts().CPlusPlus)
    return Invalid(S, OpPC);

  // Volatile object can be written-to and read if they are being constructed.
  if (llvm::is_contained(Range&: S.InitializingBlocks, Element: Ptr.block()))
    return true;

  // The reason why Ptr is volatile might be further up the hierarchy.
  // Find that pointer.
  Pointer P = Ptr;
  while (!P.isRoot()) {
    if (P.getType().isVolatileQualified())
      break;
    P = P.getBase();
  }

  // DiagKind: 2 = field, 1 = named variable, 0 = other (e.g. a temporary);
  // presumably matches the %select in the note — confirm against
  // DiagnosticSema definitions.
  const NamedDecl *ND = nullptr;
  int DiagKind;
  SourceLocation Loc;
  if (const auto *F = P.getField()) {
    DiagKind = 2;
    Loc = F->getLocation();
    ND = F;
  } else if (auto *VD = P.getFieldDesc()->asValueDecl()) {
    DiagKind = 1;
    Loc = VD->getLocation();
    ND = VD;
  } else {
    DiagKind = 0;
    if (const auto *E = P.getFieldDesc()->asExpr())
      Loc = E->getExprLoc();
  }

  S.FFDiag(Loc: S.Current->getLocation(PC: OpPC),
           DiagId: diag::note_constexpr_access_volatile_obj, ExtraNotes: 1)
      << AK << DiagKind << ND;
  S.Note(Loc, DiagId: diag::note_constexpr_volatile_here) << DiagKind;
  return false;
}
693
694bool DiagnoseUninitialized(InterpState &S, CodePtr OpPC, const Pointer &Ptr,
695 AccessKinds AK) {
696 assert(Ptr.isLive());
697 assert(!Ptr.isInitialized());
698 return DiagnoseUninitialized(S, OpPC, Extern: Ptr.isExtern(), Desc: Ptr.getDeclDesc(), AK);
699}
700
/// Emit the most specific diagnostic available for a read of an
/// uninitialized object described by \p Desc. Always returns false.
bool DiagnoseUninitialized(InterpState &S, CodePtr OpPC, bool Extern,
                           const Descriptor *Desc, AccessKinds AK) {
  // An extern variable may be initialized at an actual evaluation; stay
  // silent while only checking for potential constant expressions.
  if (Extern && S.checkingPotentialConstantExpression())
    return false;

  if (const auto *VD = Desc->asVarDecl();
      VD && (VD->isConstexpr() || VD->hasGlobalStorage())) {

    // Self-referential read during the variable's own initialization.
    if (VD == S.EvaluatingDecl &&
        !(S.getLangOpts().CPlusPlus23 && VD->getType()->isReferenceType())) {
      if (!S.getLangOpts().CPlusPlus14 &&
          !VD->getType().isConstant(Ctx: S.getASTContext())) {
        // Diagnose as non-const read.
        diagnoseNonConstVariable(S, OpPC, VD);
      } else {
        const SourceInfo &Loc = S.Current->getSource(PC: OpPC);
        // Diagnose as "read of object outside its lifetime".
        S.FFDiag(SI: Loc, DiagId: diag::note_constexpr_access_uninit)
            << AK << /*IsIndeterminate=*/false;
      }
      return false;
    }

    // Initialized but not to a constant, vs. no initializer at all.
    if (VD->getAnyInitializer()) {
      const SourceInfo &Loc = S.Current->getSource(PC: OpPC);
      S.FFDiag(SI: Loc, DiagId: diag::note_constexpr_var_init_non_constant, ExtraNotes: 1) << VD;
      S.Note(Loc: VD->getLocation(), DiagId: diag::note_declared_at);
    } else {
      diagnoseMissingInitializer(S, OpPC, VD);
    }
    return false;
  }

  // Generic "read of uninitialized object" for everything else.
  if (!S.checkingPotentialConstantExpression()) {
    S.FFDiag(SI: S.Current->getSource(PC: OpPC), DiagId: diag::note_constexpr_access_uninit)
        << AK << /*uninitialized=*/true << S.Current->getRange(PC: OpPC);
  }
  return false;
}
740
741static bool CheckLifetime(InterpState &S, CodePtr OpPC, Lifetime LT,
742 AccessKinds AK) {
743 if (LT == Lifetime::Started)
744 return true;
745
746 if (!S.checkingPotentialConstantExpression()) {
747 S.FFDiag(SI: S.Current->getSource(PC: OpPC), DiagId: diag::note_constexpr_access_uninit)
748 << AK << /*uninitialized=*/false << S.Current->getRange(PC: OpPC);
749 }
750 return false;
751}
752
753static bool CheckWeak(InterpState &S, CodePtr OpPC, const Block *B) {
754 if (!B->isWeak())
755 return true;
756
757 const auto *VD = B->getDescriptor()->asVarDecl();
758 assert(VD);
759 S.FFDiag(Loc: S.Current->getLocation(PC: OpPC), DiagId: diag::note_constexpr_var_init_weak)
760 << VD;
761 S.Note(Loc: VD->getLocation(), DiagId: diag::note_declared_at);
762
763 return false;
764}
765
// The list of checks here is just the one from CheckLoad, but with the
// ones removed that are impossible on primitive global values.
// For example, since those can't be members of structs, they also can't
// be mutable.
bool CheckGlobalLoad(InterpState &S, CodePtr OpPC, const Block *B) {
  const auto &Desc = B->getBlockDesc<GlobalInlineDescriptor>();
  // Inaccessible blocks (extern/dummy/weak) get only the corresponding
  // checks; everything else is moot for them.
  if (!B->isAccessible()) {
    if (!CheckExtern(S, OpPC, Ptr: Pointer(const_cast<Block *>(B))))
      return false;
    if (!CheckDummy(S, OpPC, B, AK: AK_Read))
      return false;
    return CheckWeak(S, OpPC, B);
  }

  if (!CheckConstant(S, OpPC, Desc: B->getDescriptor()))
    return false;
  if (Desc.InitState != GlobalInitState::Initialized)
    return DiagnoseUninitialized(S, OpPC, Extern: B->isExtern(), Desc: B->getDescriptor(),
                                 AK: AK_Read);
  if (!CheckTemporary(S, OpPC, B, AK: AK_Read))
    return false;
  // Inlined volatile handling (cf. CheckVolatile): a global is always the
  // "named variable" case, DiagKind 1.
  if (B->getDescriptor()->IsVolatile) {
    if (!S.getLangOpts().CPlusPlus)
      return Invalid(S, OpPC);

    const ValueDecl *D = B->getDescriptor()->asValueDecl();
    S.FFDiag(Loc: S.Current->getLocation(PC: OpPC),
             DiagId: diag::note_constexpr_access_volatile_obj, ExtraNotes: 1)
        << AK_Read << 1 << D;
    S.Note(Loc: D->getLocation(), DiagId: diag::note_constexpr_volatile_here) << 1;
    return false;
  }
  return true;
}
800
// Similarly, for local loads: the reduced CheckLoad list for primitive
// local variables (which can never be extern, mutable, or union members).
bool CheckLocalLoad(InterpState &S, CodePtr OpPC, const Block *B) {
  assert(!B->isExtern());
  const auto &Desc = *reinterpret_cast<const InlineDescriptor *>(B->rawData());
  if (!CheckLifetime(S, OpPC, LT: Desc.LifeState, AK: AK_Read))
    return false;
  if (!Desc.IsInitialized)
    return DiagnoseUninitialized(S, OpPC, /*Extern=*/false, Desc: B->getDescriptor(),
                                 AK: AK_Read);
  // Inlined volatile handling (cf. CheckVolatile): a local is always the
  // "named variable" case, DiagKind 1.
  if (B->getDescriptor()->IsVolatile) {
    if (!S.getLangOpts().CPlusPlus)
      return Invalid(S, OpPC);

    const ValueDecl *D = B->getDescriptor()->asValueDecl();
    S.FFDiag(Loc: S.Current->getLocation(PC: OpPC),
             DiagId: diag::note_constexpr_access_volatile_obj, ExtraNotes: 1)
        << AK_Read << 1 << D;
    S.Note(Loc: D->getLocation(), DiagId: diag::note_constexpr_volatile_here) << 1;
    return false;
  }
  return true;
}
823
/// Full set of checks performed before reading through \p Ptr. Returns true
/// when the load is permitted; otherwise a diagnostic has (usually) been
/// emitted and false is returned.
bool CheckLoad(InterpState &S, CodePtr OpPC, const Pointer &Ptr,
               AccessKinds AK) {
  if (Ptr.isZero()) {
    const auto &Src = S.Current->getSource(PC: OpPC);

    if (Ptr.isField())
      S.FFDiag(SI: Src, DiagId: diag::note_constexpr_null_subobject) << CSK_Field;
    else
      S.FFDiag(SI: Src, DiagId: diag::note_constexpr_access_null) << AK;
    return false;
  }
  // Block pointers are the only ones we can actually read from.
  if (!Ptr.isBlockPointer())
    return false;

  // Inaccessible blocks (dead/extern/dummy/weak) get only the checks that
  // explain *why* they are inaccessible.
  if (!Ptr.block()->isAccessible()) {
    if (!CheckLive(S, OpPC, Ptr, AK))
      return false;
    if (!CheckExtern(S, OpPC, Ptr))
      return false;
    if (!CheckDummy(S, OpPC, B: Ptr.block(), AK))
      return false;
    return CheckWeak(S, OpPC, B: Ptr.block());
  }

  if (!CheckConstant(S, OpPC, Ptr))
    return false;
  if (!CheckRange(S, OpPC, Ptr, AK))
    return false;
  if (!CheckActive(S, OpPC, Ptr, AK))
    return false;
  if (!CheckLifetime(S, OpPC, LT: Ptr.getLifetime(), AK))
    return false;
  if (!Ptr.isInitialized())
    return DiagnoseUninitialized(S, OpPC, Ptr, AK);
  if (!CheckTemporary(S, OpPC, B: Ptr.block(), AK))
    return false;

  if (!CheckMutable(S, OpPC, Ptr))
    return false;
  if (!CheckVolatile(S, OpPC, Ptr, AK))
    return false;
  // Reads from constexpr-unknown values fail silently outside of a constant
  // context (unless the pointer is const).
  if (!Ptr.isConst() && !S.inConstantContext() && isConstexprUnknown(P: Ptr))
    return false;
  return true;
}
870
/// This is not used by any of the opcodes directly. It's used by
/// EvalEmitter to do the final lvalue-to-rvalue conversion.
/// Same structure as CheckLoad, minus the null/range/mutable-volatile-
/// constexpr-unknown checks that cannot apply at this point.
bool CheckFinalLoad(InterpState &S, CodePtr OpPC, const Pointer &Ptr) {
  assert(!Ptr.isZero());
  if (!Ptr.isBlockPointer())
    return false;

  if (!Ptr.block()->isAccessible()) {
    if (!CheckLive(S, OpPC, Ptr, AK: AK_Read))
      return false;
    if (!CheckExtern(S, OpPC, Ptr))
      return false;
    if (!CheckDummy(S, OpPC, B: Ptr.block(), AK: AK_Read))
      return false;
    return CheckWeak(S, OpPC, B: Ptr.block());
  }

  if (!CheckConstant(S, OpPC, Ptr))
    return false;

  if (!CheckActive(S, OpPC, Ptr, AK: AK_Read))
    return false;
  if (!CheckLifetime(S, OpPC, LT: Ptr.getLifetime(), AK: AK_Read))
    return false;
  if (!Ptr.isInitialized())
    return DiagnoseUninitialized(S, OpPC, Ptr, AK: AK_Read);
  if (!CheckTemporary(S, OpPC, B: Ptr.block(), AK: AK_Read))
    return false;
  if (!CheckMutable(S, OpPC, Ptr))
    return false;
  return true;
}
903
/// Full set of checks performed before writing through \p Ptr.
/// \p WillBeActivated is forwarded to CheckActive for union-member
/// activation semantics.
bool CheckStore(InterpState &S, CodePtr OpPC, const Pointer &Ptr,
                bool WillBeActivated) {
  // Unlike CheckLoad, null pointers fail silently here.
  if (!Ptr.isBlockPointer() || Ptr.isZero())
    return false;

  if (!Ptr.block()->isAccessible()) {
    if (!CheckLive(S, OpPC, Ptr, AK: AK_Assign))
      return false;
    if (!CheckExtern(S, OpPC, Ptr))
      return false;
    return CheckDummy(S, OpPC, B: Ptr.block(), AK: AK_Assign);
  }
  if (!CheckLifetime(S, OpPC, LT: Ptr.getLifetime(), AK: AK_Assign))
    return false;
  if (!CheckRange(S, OpPC, Ptr, AK: AK_Assign))
    return false;
  if (!CheckActive(S, OpPC, Ptr, AK: AK_Assign, WillActivate: WillBeActivated))
    return false;
  if (!CheckGlobal(S, OpPC, Ptr))
    return false;
  if (!CheckConst(S, OpPC, Ptr))
    return false;
  if (!CheckVolatile(S, OpPC, Ptr, AK: AK_Assign))
    return false;
  // Writes to constexpr-unknown values fail silently outside of a constant
  // context.
  if (!S.inConstantContext() && isConstexprUnknown(P: Ptr))
    return false;
  return true;
}
932
933static bool CheckInvoke(InterpState &S, CodePtr OpPC, const Pointer &Ptr) {
934 if (!CheckLive(S, OpPC, Ptr, AK: AK_MemberCall))
935 return false;
936 if (!Ptr.isDummy()) {
937 if (!CheckExtern(S, OpPC, Ptr))
938 return false;
939 if (!CheckRange(S, OpPC, Ptr, AK: AK_MemberCall))
940 return false;
941 }
942 return true;
943}
944
945bool CheckInit(InterpState &S, CodePtr OpPC, const Pointer &Ptr) {
946 if (!CheckLive(S, OpPC, Ptr, AK: AK_Assign))
947 return false;
948 if (!CheckRange(S, OpPC, Ptr, AK: AK_Assign))
949 return false;
950 return true;
951}
952
953static bool diagnoseCallableDecl(InterpState &S, CodePtr OpPC,
954 const FunctionDecl *DiagDecl) {
955 // Bail out if the function declaration itself is invalid. We will
956 // have produced a relevant diagnostic while parsing it, so just
957 // note the problematic sub-expression.
958 if (DiagDecl->isInvalidDecl())
959 return Invalid(S, OpPC);
960
961 // Diagnose failed assertions specially.
962 if (S.Current->getLocation(PC: OpPC).isMacroID() && DiagDecl->getIdentifier()) {
963 // FIXME: Instead of checking for an implementation-defined function,
964 // check and evaluate the assert() macro.
965 StringRef Name = DiagDecl->getName();
966 bool AssertFailed =
967 Name == "__assert_rtn" || Name == "__assert_fail" || Name == "_wassert";
968 if (AssertFailed) {
969 S.FFDiag(Loc: S.Current->getLocation(PC: OpPC),
970 DiagId: diag::note_constexpr_assert_failed);
971 return false;
972 }
973 }
974
975 if (!S.getLangOpts().CPlusPlus11) {
976 S.FFDiag(Loc: S.Current->getLocation(PC: OpPC),
977 DiagId: diag::note_invalid_subexpr_in_const_expr);
978 return false;
979 }
980
981 // Invalid decls have been diagnosed before.
982 if (DiagDecl->isInvalidDecl())
983 return false;
984
985 // If this function is not constexpr because it is an inherited
986 // non-constexpr constructor, diagnose that directly.
987 const auto *CD = dyn_cast<CXXConstructorDecl>(Val: DiagDecl);
988 if (CD && CD->isInheritingConstructor()) {
989 const auto *Inherited = CD->getInheritedConstructor().getConstructor();
990 if (!Inherited->isConstexpr())
991 DiagDecl = CD = Inherited;
992 }
993
994 // Silently reject constructors of invalid classes. The invalid class
995 // has been rejected elsewhere before.
996 if (CD && CD->getParent()->isInvalidDecl())
997 return false;
998
999 // FIXME: If DiagDecl is an implicitly-declared special member function
1000 // or an inheriting constructor, we should be much more explicit about why
1001 // it's not constexpr.
1002 if (CD && CD->isInheritingConstructor()) {
1003 S.FFDiag(Loc: S.Current->getLocation(PC: OpPC), DiagId: diag::note_constexpr_invalid_inhctor,
1004 ExtraNotes: 1)
1005 << CD->getInheritedConstructor().getConstructor()->getParent();
1006 S.Note(Loc: DiagDecl->getLocation(), DiagId: diag::note_declared_at);
1007 } else {
1008 // Don't emit anything if the function isn't defined and we're checking
1009 // for a constant expression. It might be defined at the point we're
1010 // actually calling it.
1011 bool IsExtern = DiagDecl->getStorageClass() == SC_Extern;
1012 bool IsDefined = DiagDecl->isDefined();
1013 if (!IsDefined && !IsExtern && DiagDecl->isConstexpr() &&
1014 S.checkingPotentialConstantExpression())
1015 return false;
1016
1017 // If the declaration is defined, declared 'constexpr' _and_ has a body,
1018 // the below diagnostic doesn't add anything useful.
1019 if (DiagDecl->isDefined() && DiagDecl->isConstexpr() && DiagDecl->hasBody())
1020 return false;
1021
1022 S.FFDiag(Loc: S.Current->getLocation(PC: OpPC),
1023 DiagId: diag::note_constexpr_invalid_function, ExtraNotes: 1)
1024 << DiagDecl->isConstexpr() << (bool)CD << DiagDecl;
1025
1026 if (DiagDecl->getDefinition())
1027 S.Note(Loc: DiagDecl->getDefinition()->getLocation(), DiagId: diag::note_declared_at);
1028 else
1029 S.Note(Loc: DiagDecl->getLocation(), DiagId: diag::note_declared_at);
1030 }
1031
1032 return false;
1033}
1034
/// Check whether function \p F may be called in the current evaluation
/// mode. Delegates detailed diagnostics to diagnoseCallableDecl() when
/// the call is rejected.
static bool CheckCallable(InterpState &S, CodePtr OpPC, const Function *F) {
  // Virtual calls are not permitted in constant expressions before C++20.
  if (F->isVirtual() && !S.getLangOpts().CPlusPlus20) {
    const SourceLocation &Loc = S.Current->getLocation(PC: OpPC);
    S.CCEDiag(Loc, DiagId: diag::note_constexpr_virtual_call);
    return false;
  }

  // When only checking for a potential constant expression, don't
  // follow nested calls; fail silently.
  if (S.checkingPotentialConstantExpression() && S.Current->getDepth() != 0)
    return false;

  // Callable when valid, defined, and constexpr — or carrying the
  // [[msvc::constexpr]] attribute where the current frame allows it.
  if (F->isValid() && F->hasBody() &&
      (F->isConstexpr() || (S.Current->MSVCConstexprAllowed &&
                            F->getDecl()->hasAttr<MSConstexprAttr>())))
    return true;

  const FunctionDecl *DiagDecl = F->getDecl();
  const FunctionDecl *Definition = nullptr;
  DiagDecl->getBody(Definition);

  // A constexpr function without a definition yet may still get one
  // later, so don't diagnose while checking potential constantness.
  if (!Definition && S.checkingPotentialConstantExpression() &&
      DiagDecl->isConstexpr()) {
    return false;
  }

  // Implicitly constexpr.
  if (F->isLambdaStaticInvoker())
    return true;

  return diagnoseCallableDecl(S, OpPC, DiagDecl);
}
1065
1066static bool CheckCallDepth(InterpState &S, CodePtr OpPC) {
1067 if ((S.Current->getDepth() + 1) > S.getLangOpts().ConstexprCallDepth) {
1068 S.FFDiag(SI: S.Current->getSource(PC: OpPC),
1069 DiagId: diag::note_constexpr_depth_limit_exceeded)
1070 << S.getLangOpts().ConstexprCallDepth;
1071 return false;
1072 }
1073
1074 return true;
1075}
1076
1077bool CheckThis(InterpState &S, CodePtr OpPC) {
1078 if (S.Current->hasThisPointer())
1079 return true;
1080
1081 const Expr *E = S.Current->getExpr(PC: OpPC);
1082 if (S.getLangOpts().CPlusPlus11) {
1083 bool IsImplicit = false;
1084 if (const auto *TE = dyn_cast<CXXThisExpr>(Val: E))
1085 IsImplicit = TE->isImplicit();
1086 S.FFDiag(E, DiagId: diag::note_constexpr_this) << IsImplicit;
1087 } else {
1088 S.FFDiag(E);
1089 }
1090
1091 return false;
1092}
1093
/// Check a floating-point operation's \p Result and APFloat \p Status
/// for conditions that invalidate constant evaluation: NaN results,
/// and — outside a constant context — dependence on the dynamic
/// rounding mode or FP exception state described by \p FPO.
bool CheckFloatResult(InterpState &S, CodePtr OpPC, const Floating &Result,
                      APFloat::opStatus Status, FPOptions FPO) {
  // [expr.pre]p4:
  //   If during the evaluation of an expression, the result is not
  //   mathematically defined [...], the behavior is undefined.
  // FIXME: C++ rules require us to not conform to IEEE 754 here.
  if (Result.isNan()) {
    const SourceInfo &E = S.Current->getSource(PC: OpPC);
    S.CCEDiag(SI: E, DiagId: diag::note_constexpr_float_arithmetic)
        << /*NaN=*/true << S.Current->getRange(PC: OpPC);
    return S.noteUndefinedBehavior();
  }

  // In a constant context, assume that any dynamic rounding mode or FP
  // exception state matches the default floating-point environment.
  if (S.inConstantContext())
    return true;

  if ((Status & APFloat::opInexact) &&
      FPO.getRoundingMode() == llvm::RoundingMode::Dynamic) {
    // Inexact result means that it depends on rounding mode. If the requested
    // mode is dynamic, the evaluation cannot be made in compile time.
    const SourceInfo &E = S.Current->getSource(PC: OpPC);
    S.FFDiag(SI: E, DiagId: diag::note_constexpr_dynamic_rounding);
    return false;
  }

  // Any non-OK status is unacceptable when the FP environment may be
  // observed or modified (dynamic rounding, trapping exceptions, or
  // explicit FENV access).
  if ((Status != APFloat::opOK) &&
      (FPO.getRoundingMode() == llvm::RoundingMode::Dynamic ||
       FPO.getExceptionMode() != LangOptions::FPE_Ignore ||
       FPO.getAllowFEnvAccess())) {
    const SourceInfo &E = S.Current->getSource(PC: OpPC);
    S.FFDiag(SI: E, DiagId: diag::note_constexpr_float_arithmetic_strict);
    return false;
  }

  if ((Status & APFloat::opStatus::opInvalidOp) &&
      FPO.getExceptionMode() != LangOptions::FPE_Ignore) {
    const SourceInfo &E = S.Current->getSource(PC: OpPC);
    // There is no usefully definable result.
    S.FFDiag(SI: E);
    return false;
  }

  return true;
}
1140
1141bool CheckDynamicMemoryAllocation(InterpState &S, CodePtr OpPC) {
1142 if (S.getLangOpts().CPlusPlus20)
1143 return true;
1144
1145 const SourceInfo &E = S.Current->getSource(PC: OpPC);
1146 S.CCEDiag(SI: E, DiagId: diag::note_constexpr_new);
1147 return true;
1148}
1149
/// Check that the form used to allocate (\p AllocForm: new vs. new[]
/// vs. operator new) matches the form used to deallocate
/// (\p DeleteForm). On mismatch, diagnoses and points back at the
/// allocating expression \p NewExpr.
bool CheckNewDeleteForms(InterpState &S, CodePtr OpPC,
                         DynamicAllocator::Form AllocForm,
                         DynamicAllocator::Form DeleteForm, const Descriptor *D,
                         const Expr *NewExpr) {
  if (AllocForm == DeleteForm)
    return true;

  QualType TypeToDiagnose = D->getDataType(Ctx: S.getASTContext());

  const SourceInfo &E = S.Current->getSource(PC: OpPC);
  S.FFDiag(SI: E, DiagId: diag::note_constexpr_new_delete_mismatch)
      << static_cast<int>(DeleteForm) << static_cast<int>(AllocForm)
      << TypeToDiagnose;
  // Note where the mismatched allocation happened.
  S.Note(Loc: NewExpr->getExprLoc(), DiagId: diag::note_constexpr_dynamic_alloc_here)
      << NewExpr->getSourceRange();
  return false;
}
1167
/// Check that the object being deleted (whose creating expression is
/// \p Source) was actually heap-allocated by one of the recognized
/// allocation forms. Diagnoses deletion of non-heap objects.
bool CheckDeleteSource(InterpState &S, CodePtr OpPC, const Expr *Source,
                       const Pointer &Ptr) {
  // Regular new type(...) call.
  if (isa_and_nonnull<CXXNewExpr>(Val: Source))
    return true;
  // operator new.
  if (const auto *CE = dyn_cast_if_present<CallExpr>(Val: Source);
      CE && CE->getBuiltinCallee() == Builtin::BI__builtin_operator_new)
    return true;
  // std::allocator.allocate() call
  if (const auto *MCE = dyn_cast_if_present<CXXMemberCallExpr>(Val: Source);
      MCE && MCE->getMethodDecl()->getIdentifier()->isStr(Str: "allocate"))
    return true;

  // Whatever this is, we didn't heap allocate it.
  const SourceInfo &Loc = S.Current->getSource(PC: OpPC);
  S.FFDiag(SI: Loc, DiagId: diag::note_constexpr_delete_not_heap_alloc)
      << Ptr.toDiagnosticString(Ctx: S.getASTContext());

  // Point at where the non-heap object came from.
  if (Ptr.isTemporary())
    S.Note(Loc: Ptr.getDeclLoc(), DiagId: diag::note_constexpr_temporary_here);
  else
    S.Note(Loc: Ptr.getDeclLoc(), DiagId: diag::note_declared_at);
  return false;
}
1193
1194/// We aleady know the given DeclRefExpr is invalid for some reason,
1195/// now figure out why and print appropriate diagnostics.
1196bool CheckDeclRef(InterpState &S, CodePtr OpPC, const DeclRefExpr *DR) {
1197 const ValueDecl *D = DR->getDecl();
1198 return diagnoseUnknownDecl(S, OpPC, D);
1199}
1200
1201bool InvalidDeclRef(InterpState &S, CodePtr OpPC, const DeclRefExpr *DR,
1202 bool InitializerFailed) {
1203 assert(DR);
1204
1205 if (InitializerFailed) {
1206 const SourceInfo &Loc = S.Current->getSource(PC: OpPC);
1207 const auto *VD = cast<VarDecl>(Val: DR->getDecl());
1208 S.FFDiag(SI: Loc, DiagId: diag::note_constexpr_var_init_non_constant, ExtraNotes: 1) << VD;
1209 S.Note(Loc: VD->getLocation(), DiagId: diag::note_declared_at);
1210 return false;
1211 }
1212
1213 return CheckDeclRef(S, OpPC, DR);
1214}
1215
/// Diagnose accesses through a dummy block (a placeholder for an object
/// whose value the interpreter cannot model). Returns true only when
/// \p B is not a dummy block at all.
bool CheckDummy(InterpState &S, CodePtr OpPC, const Block *B, AccessKinds AK) {
  if (!B->isDummy())
    return true;

  const ValueDecl *D = B->getDescriptor()->asValueDecl();
  if (!D)
    return false;

  // For reads and read-modify-write accesses, explain why the
  // declaration itself is unusable in a constant expression.
  if (AK == AK_Read || AK == AK_Increment || AK == AK_Decrement)
    return diagnoseUnknownDecl(S, OpPC, D);

  // Other accesses are modifications; diagnose modifying a global
  // (for destroy, or in C++14 onward where mutation is otherwise legal).
  if (AK == AK_Destroy || S.getLangOpts().CPlusPlus14) {
    const SourceInfo &E = S.Current->getSource(PC: OpPC);
    S.FFDiag(SI: E, DiagId: diag::note_constexpr_modify_global);
  }
  return false;
}
1233
/// Check that arguments bound to nonnull-attributed parameters of \p F
/// are not null. The arguments are still live on the interpreter stack;
/// \p ArgSize is their total byte size, used to peek at each slot.
static bool CheckNonNullArgs(InterpState &S, CodePtr OpPC, const Function *F,
                             const CallExpr *CE, unsigned ArgSize) {
  auto Args = ArrayRef(CE->getArgs(), CE->getNumArgs());
  auto NonNullArgs = collectNonNullArgs(F: F->getDecl(), Args);
  // Byte offset of the current argument, counted from the first arg.
  unsigned Offset = 0;
  unsigned Index = 0;
  for (const Expr *Arg : Args) {
    if (NonNullArgs[Index] && Arg->getType()->isPointerType()) {
      // Peek relative to the top of the argument area.
      const Pointer &ArgPtr = S.Stk.peek<Pointer>(Offset: ArgSize - Offset);
      if (ArgPtr.isZero()) {
        const SourceLocation &Loc = S.Current->getLocation(PC: OpPC);
        S.CCEDiag(Loc, DiagId: diag::note_non_null_attribute_failed);
        return false;
      }
    }

    // Advance past this argument's (aligned) stack slot.
    Offset += align(Size: primSize(Type: S.Ctx.classify(E: Arg).value_or(PT: PT_Ptr)));
    ++Index;
  }
  return true;
}
1255
/// Invoke the (non-trivial) destructor of the record at \p BasePtr,
/// described by \p Desc. Pushes the 'this' pointer and calls the
/// compiled destructor function.
static bool runRecordDestructor(InterpState &S, CodePtr OpPC,
                                const Pointer &BasePtr,
                                const Descriptor *Desc) {
  assert(Desc->isRecord());
  const Record *R = Desc->ElemRecord;
  assert(R);

  // Destroying an object while its own destructor is still running
  // is a double-destroy.
  if (S.Current->hasThisPointer() && S.Current->getFunction()->isDestructor() &&
      Pointer::pointToSameBlock(A: BasePtr, B: S.Current->getThis())) {
    const SourceInfo &Loc = S.Current->getSource(PC: OpPC);
    S.FFDiag(SI: Loc, DiagId: diag::note_constexpr_double_destroy);
    return false;
  }

  // Destructor of this record.
  const CXXDestructorDecl *Dtor = R->getDestructor();
  assert(Dtor);
  assert(!Dtor->isTrivial());
  const Function *DtorFunc = S.getContext().getOrCreateFunction(FuncDecl: Dtor);
  if (!DtorFunc)
    return false;

  // The destructor expects its 'this' pointer on top of the stack.
  S.Stk.push<Pointer>(Args: BasePtr);
  return Call(S, OpPC, Func: DtorFunc, VarArgSize: 0);
}
1281
/// Run all destructors required before deallocating block \p B: the
/// record's own destructor, or — for a composite array — each element's
/// destructor in reverse index order.
static bool RunDestructors(InterpState &S, CodePtr OpPC, const Block *B) {
  assert(B);
  const Descriptor *Desc = B->getDescriptor();

  // Primitives (and arrays of primitives) have nothing to destroy.
  if (Desc->isPrimitive() || Desc->isPrimitiveArray())
    return true;

  assert(Desc->isRecord() || Desc->isCompositeArray());

  if (Desc->hasTrivialDtor())
    return true;

  if (Desc->isCompositeArray()) {
    unsigned N = Desc->getNumElems();
    if (N == 0)
      return true;
    const Descriptor *ElemDesc = Desc->ElemDesc;
    assert(ElemDesc->isRecord());

    // Destroy elements in reverse of construction order.
    Pointer RP(const_cast<Block *>(B));
    for (int I = static_cast<int>(N) - 1; I >= 0; --I) {
      if (!runRecordDestructor(S, OpPC, BasePtr: RP.atIndex(Idx: I).narrow(), Desc: ElemDesc))
        return false;
    }
    return true;
  }

  assert(Desc->isRecord());
  return runRecordDestructor(S, OpPC, BasePtr: Pointer(const_cast<Block *>(B)), Desc);
}
1312
1313static bool hasVirtualDestructor(QualType T) {
1314 if (const CXXRecordDecl *RD = T->getAsCXXRecordDecl())
1315 if (const CXXDestructorDecl *DD = RD->getDestructor())
1316 return DD->isVirtual();
1317 return false;
1318}
1319
/// Opcode: implements delete / delete[] of the pointer on top of the
/// stack. Validates delete-form, type, and heap-provenance, runs the
/// destructors, and finally releases the allocation.
bool Free(InterpState &S, CodePtr OpPC, bool DeleteIsArrayForm,
          bool IsGlobalDelete) {
  if (!CheckDynamicMemoryAllocation(S, OpPC))
    return false;

  DynamicAllocator &Allocator = S.getAllocator();

  const Expr *Source = nullptr;
  const Block *BlockToDelete = nullptr;
  {
    // Extra scope for this so the block doesn't have this pointer
    // pointing to it when we destroy it.
    Pointer Ptr = S.Stk.pop<Pointer>();

    // Deleting nullptr is always fine.
    if (Ptr.isZero())
      return true;

    // Remove base casts.
    QualType InitialType = Ptr.getType();
    Ptr = Ptr.stripBaseCasts();

    Source = Ptr.getDeclDesc()->asExpr();
    BlockToDelete = Ptr.block();

    // Check that new[]/delete[] or new/delete were used, not a mixture.
    const Descriptor *BlockDesc = BlockToDelete->getDescriptor();
    if (std::optional<DynamicAllocator::Form> AllocForm =
            Allocator.getAllocationForm(Source)) {
      DynamicAllocator::Form DeleteForm =
          DeleteIsArrayForm ? DynamicAllocator::Form::Array
                            : DynamicAllocator::Form::NonArray;
      if (!CheckNewDeleteForms(S, OpPC, AllocForm: *AllocForm, DeleteForm, D: BlockDesc,
                               NewExpr: Source))
        return false;
    }

    // For the non-array case, the types must match if the static type
    // does not have a virtual destructor.
    if (!DeleteIsArrayForm && Ptr.getType() != InitialType &&
        !hasVirtualDestructor(T: InitialType)) {
      S.FFDiag(SI: S.Current->getSource(PC: OpPC),
               DiagId: diag::note_constexpr_delete_base_nonvirt_dtor)
          << InitialType << Ptr.getType();
      return false;
    }

    // Only a pointer to the start of a complete allocation may be deleted.
    if (!Ptr.isRoot() || (Ptr.isOnePastEnd() && !Ptr.isZeroSizeArray()) ||
        (Ptr.isArrayElement() && Ptr.getIndex() != 0)) {
      const SourceInfo &Loc = S.Current->getSource(PC: OpPC);
      S.FFDiag(SI: Loc, DiagId: diag::note_constexpr_delete_subobject)
          << Ptr.toDiagnosticString(Ctx: S.getASTContext()) << Ptr.isOnePastEnd();
      return false;
    }

    if (!CheckDeleteSource(S, OpPC, Source, Ptr))
      return false;

    // For a class type with a virtual destructor, the selected operator delete
    // is the one looked up when building the destructor.
    if (!DeleteIsArrayForm && !IsGlobalDelete) {
      QualType AllocType = Ptr.getType();
      auto getVirtualOperatorDelete = [](QualType T) -> const FunctionDecl * {
        if (const CXXRecordDecl *RD = T->getAsCXXRecordDecl())
          if (const CXXDestructorDecl *DD = RD->getDestructor())
            return DD->isVirtual() ? DD->getOperatorDelete() : nullptr;
        return nullptr;
      };

      if (const FunctionDecl *VirtualDelete =
              getVirtualOperatorDelete(AllocType);
          VirtualDelete &&
          !VirtualDelete
               ->isUsableAsGlobalAllocationFunctionInConstantEvaluation()) {
        S.FFDiag(SI: S.Current->getSource(PC: OpPC),
                 DiagId: diag::note_constexpr_new_non_replaceable)
            << isa<CXXMethodDecl>(Val: VirtualDelete) << VirtualDelete;
        return false;
      }
    }
  }
  assert(Source);
  assert(BlockToDelete);

  // Invoke destructors before deallocating the memory.
  if (!RunDestructors(S, OpPC, B: BlockToDelete))
    return false;

  if (!Allocator.deallocate(Source, BlockToDelete, S)) {
    // Nothing has been deallocated, this must be a double-delete.
    const SourceInfo &Loc = S.Current->getSource(PC: OpPC);
    S.FFDiag(SI: Loc, DiagId: diag::note_constexpr_double_delete);
    return false;
  }

  return true;
}
1417
/// Diagnose storing a value outside the representable range of the
/// unscoped enum \p ED (undefined behavior per [dcl.enum]).
void diagnoseEnumValue(InterpState &S, CodePtr OpPC, const EnumDecl *ED,
                       const APSInt &Value) {
  llvm::APInt Min;
  llvm::APInt Max;
  // NOTE: getValueRange() takes its out-parameters in (Max, Min) order.
  ED->getValueRange(Max, Min);
  // Max is decremented to form an inclusive bound — presumably
  // getValueRange() returns one-past-the-maximum; confirm against
  // EnumDecl::getValueRange.
  --Max;

  // Enums with negative enumerators use the signed comparison...
  if (ED->getNumNegativeBits() &&
      (Max.slt(RHS: Value.getSExtValue()) || Min.sgt(RHS: Value.getSExtValue()))) {
    const SourceLocation &Loc = S.Current->getLocation(PC: OpPC);
    S.CCEDiag(Loc, DiagId: diag::note_constexpr_unscoped_enum_out_of_range)
        << llvm::toString(I: Value, Radix: 10) << Min.getSExtValue() << Max.getSExtValue()
        << ED;
  } else if (!ED->getNumNegativeBits() && Max.ult(RHS: Value.getZExtValue())) {
    // ...otherwise the unsigned comparison.
    const SourceLocation &Loc = S.Current->getLocation(PC: OpPC);
    S.CCEDiag(Loc, DiagId: diag::note_constexpr_unscoped_enum_out_of_range)
        << llvm::toString(I: Value, Radix: 10) << Min.getZExtValue() << Max.getZExtValue()
        << ED;
  }
}
1438
/// Diagnose the use of a non-literal type \p T in a constant expression
/// (only reachable before C++23, where the literal-type restriction was
/// removed). Allowed when we're inside a constexpr constructor of the
/// declaration currently being evaluated (the C++1y rule below).
bool CheckLiteralType(InterpState &S, CodePtr OpPC, const Type *T) {
  assert(T);
  assert(!S.getLangOpts().CPlusPlus23);

  // C++1y: A constant initializer for an object o [...] may also invoke
  // constexpr constructors for o and its subobjects even if those objects
  // are of non-literal class types.
  //
  // C++11 missed this detail for aggregates, so classes like this:
  //   struct foo_t { union { int i; volatile int j; } u; };
  // are not (obviously) initializable like so:
  //   __attribute__((__require_constant_initialization__))
  //   static const foo_t x = {{0}};
  // because "i" is a subobject with non-literal initialization (due to the
  // volatile member of the union). See:
  //   http://www.open-std.org/jtc1/sc22/wg21/docs/cwg_active.html#1677
  // Therefore, we use the C++1y behavior.

  if (!S.Current->isBottomFrame() &&
      S.Current->getFunction()->isConstructor() &&
      S.Current->getThis().getDeclDesc()->asDecl() == S.EvaluatingDecl) {
    return true;
  }

  const Expr *E = S.Current->getExpr(PC: OpPC);
  if (S.getLangOpts().CPlusPlus11)
    S.FFDiag(E, DiagId: diag::note_constexpr_nonliteral) << E->getType();
  else
    S.FFDiag(E, DiagId: diag::note_invalid_subexpr_in_const_expr);
  return false;
}
1470
/// Common implementation for GetPtrField/GetPtrFieldPop: validate
/// \p Ptr, then push a pointer to the field at byte offset \p Off.
static bool getField(InterpState &S, CodePtr OpPC, const Pointer &Ptr,
                     uint32_t Off) {
  // Null checks only apply in C++ constant contexts here.
  if (S.getLangOpts().CPlusPlus && S.inConstantContext() &&
      !CheckNull(S, OpPC, Ptr, CSK: CSK_Field))
    return false;

  if (!CheckRange(S, OpPC, Ptr, CSK: CSK_Field))
    return false;
  if (!CheckArray(S, OpPC, Ptr))
    return false;
  if (!CheckSubobject(S, OpPC, Ptr, CSK: CSK_Field))
    return false;

  // Integral pointers are offset arithmetically rather than through a
  // block descriptor.
  if (Ptr.isIntegralPointer()) {
    if (std::optional<IntPointer> IntPtr =
            Ptr.asIntPointer().atOffset(ASTCtx: S.getASTContext(), Offset: Off)) {
      S.Stk.push<Pointer>(Args: std::move(*IntPtr));
      return true;
    }
    return false;
  }

  if (!Ptr.isBlockPointer()) {
    // FIXME: The only time we (seem to) get here is when trying to access a
    // field of a typeid pointer. In that case, we're supposed to diagnose e.g.
    // `typeid(int).name`, but we currently diagnose `&typeid(int)`.
    S.FFDiag(SI: S.Current->getSource(PC: OpPC),
             DiagId: diag::note_constexpr_access_unreadable_object)
        << AK_Read << Ptr.toDiagnosticString(Ctx: S.getASTContext());
    return false;
  }

  // We can't get the field of something that's not a record.
  if (!Ptr.getFieldDesc()->isRecord())
    return false;

  // The resulting offset must stay inside the block.
  if ((Ptr.getByteOffset() + Off) >= Ptr.block()->getSize())
    return false;

  S.Stk.push<Pointer>(Args: Ptr.atField(Off));
  return true;
}
1513
1514bool GetPtrField(InterpState &S, CodePtr OpPC, uint32_t Off) {
1515 const auto &Ptr = S.Stk.peek<Pointer>();
1516 return getField(S, OpPC, Ptr, Off);
1517}
1518
1519bool GetPtrFieldPop(InterpState &S, CodePtr OpPC, uint32_t Off) {
1520 const auto &Ptr = S.Stk.pop<Pointer>();
1521 return getField(S, OpPC, Ptr, Off);
1522}
1523
1524static bool checkConstructor(InterpState &S, CodePtr OpPC, const Function *Func,
1525 const Pointer &ThisPtr) {
1526 assert(Func->isConstructor());
1527
1528 if (Func->getParentDecl()->isInvalidDecl())
1529 return false;
1530
1531 const Descriptor *D = ThisPtr.getFieldDesc();
1532 // FIXME: I think this case is not 100% correct. E.g. a pointer into a
1533 // subobject of a composite array.
1534 if (!D->ElemRecord)
1535 return true;
1536
1537 if (D->ElemRecord->getNumVirtualBases() == 0)
1538 return true;
1539
1540 S.FFDiag(Loc: S.Current->getLocation(PC: OpPC), DiagId: diag::note_constexpr_virtual_base)
1541 << Func->getParentDecl();
1542 return false;
1543}
1544
1545bool CheckDestructor(InterpState &S, CodePtr OpPC, const Pointer &Ptr) {
1546 if (!CheckLive(S, OpPC, Ptr, AK: AK_Destroy))
1547 return false;
1548 if (!CheckTemporary(S, OpPC, B: Ptr.block(), AK: AK_Destroy))
1549 return false;
1550 if (!CheckRange(S, OpPC, Ptr, AK: AK_Destroy))
1551 return false;
1552
1553 // Can't call a dtor on a global variable.
1554 if (Ptr.block()->isStatic()) {
1555 const SourceInfo &E = S.Current->getSource(PC: OpPC);
1556 S.FFDiag(SI: E, DiagId: diag::note_constexpr_modify_global);
1557 return false;
1558 }
1559 return CheckActive(S, OpPC, Ptr, AK: AK_Destroy);
1560}
1561
1562/// Opcode. Check if the function decl can be called at compile time.
1563bool CheckFunctionDecl(InterpState &S, CodePtr OpPC, const FunctionDecl *FD) {
1564 if (S.checkingPotentialConstantExpression() && S.Current->getDepth() != 0)
1565 return false;
1566
1567 const FunctionDecl *Definition = nullptr;
1568 const Stmt *Body = FD->getBody(Definition);
1569
1570 if (Definition && Body &&
1571 (Definition->isConstexpr() || (S.Current->MSVCConstexprAllowed &&
1572 Definition->hasAttr<MSConstexprAttr>())))
1573 return true;
1574
1575 return diagnoseCallableDecl(S, OpPC, DiagDecl: FD);
1576}
1577
/// Check a pointer bit/reinterpret cast to \p TargetType of the pointer
/// on top of the stack. Diagnoses invalid void*-to-object casts and
/// record-type reinterpretation; \p SrcIsVoidPtr marks casts whose
/// source type was 'void *'.
bool CheckBitCast(InterpState &S, CodePtr OpPC, const Type *TargetType,
                  bool SrcIsVoidPtr) {
  const auto &Ptr = S.Stk.peek<Pointer>();
  // Null and non-block pointers are accepted without further checks.
  if (Ptr.isZero())
    return true;
  if (!Ptr.isBlockPointer())
    return true;

  if (TargetType->isIntegerType())
    return true;

  if (SrcIsVoidPtr && S.getLangOpts().CPlusPlus) {
    bool HasValidResult = !Ptr.isZero();

    if (HasValidResult) {
      // std::allocator::allocate is permitted to hand back void*.
      if (S.getStdAllocatorCaller(Name: "allocate"))
        return true;

      const auto *E = cast<CastExpr>(Val: S.Current->getExpr(PC: OpPC));
      // C++26 allows the cast when the pointee types are similar.
      if (S.getLangOpts().CPlusPlus26 &&
          S.getASTContext().hasSimilarType(T1: Ptr.getType(),
                                           T2: QualType(TargetType, 0)))
        return true;

      S.CCEDiag(E, DiagId: diag::note_constexpr_invalid_void_star_cast)
          << E->getSubExpr()->getType() << S.getLangOpts().CPlusPlus26
          << Ptr.getType().getCanonicalType() << E->getType()->getPointeeType();
    } else if (!S.getLangOpts().CPlusPlus26) {
      const SourceInfo &E = S.Current->getSource(PC: OpPC);
      S.CCEDiag(SI: E, DiagId: diag::note_constexpr_invalid_cast)
          << diag::ConstexprInvalidCastKind::CastFrom << "'void *'"
          << S.Current->getRange(PC: OpPC);
    }
  }

  // Reinterpreting one record type as a different record type is not a
  // constant expression.
  QualType PtrType = Ptr.getType();
  if (PtrType->isRecordType() &&
      PtrType->getAsRecordDecl() != TargetType->getAsRecordDecl()) {
    S.CCEDiag(SI: S.Current->getSource(PC: OpPC), DiagId: diag::note_constexpr_invalid_cast)
        << diag::ConstexprInvalidCastKind::ThisConversionOrReinterpret
        << S.getLangOpts().CPlusPlus << S.Current->getRange(PC: OpPC);
    return false;
  }
  return true;
}
1623
1624static void compileFunction(InterpState &S, const Function *Func) {
1625 const FunctionDecl *Definition = Func->getDecl()->getDefinition();
1626 if (!Definition)
1627 return;
1628
1629 Compiler<ByteCodeEmitter>(S.getContext(), S.P)
1630 .compileFunc(FuncDecl: Definition, Func: const_cast<Function *>(Func));
1631}
1632
/// Opcode: call \p Func with \p VarArgSize extra bytes of variadic
/// arguments already on the stack. Creates a new frame and interprets
/// the callee; restores the previous frame on failure.
bool CallVar(InterpState &S, CodePtr OpPC, const Function *Func,
             uint32_t VarArgSize) {
  if (Func->hasThisPointer()) {
    size_t ArgSize = Func->getArgSize() + VarArgSize;
    // When the call uses RVO, the return slot pointer sits below 'this'.
    size_t ThisOffset = ArgSize - (Func->hasRVO() ? primSize(Type: PT_Ptr) : 0);
    const Pointer &ThisPtr = S.Stk.peek<Pointer>(Offset: ThisOffset);

    // If the current function is a lambda static invoker and
    // the function we're about to call is a lambda call operator,
    // skip the CheckInvoke, since the ThisPtr is a null pointer
    // anyway.
    if (!(S.Current->getFunction() &&
          S.Current->getFunction()->isLambdaStaticInvoker() &&
          Func->isLambdaCallOperator())) {
      if (!CheckInvoke(S, OpPC, Ptr: ThisPtr))
        return false;
    }

    if (S.checkingPotentialConstantExpression())
      return false;
  }

  // Bytecode for the callee may not exist yet; compile on demand.
  if (!Func->isFullyCompiled())
    compileFunction(S, Func);

  if (!CheckCallable(S, OpPC, F: Func))
    return false;

  if (!CheckCallDepth(S, OpPC))
    return false;

  auto NewFrame = std::make_unique<InterpFrame>(args&: S, args&: Func, args&: OpPC, args&: VarArgSize);
  InterpFrame *FrameBefore = S.Current;
  S.Current = NewFrame.get();

  // Note that we cannot assert(CallResult.hasValue()) here since
  // Ret() above only sets the APValue if the curent frame doesn't
  // have a caller set.
  if (Interpret(S)) {
    NewFrame.release(); // Frame was delete'd already.
    assert(S.Current == FrameBefore);
    return true;
  }

  // Interpreting the function failed somehow. Reset to
  // previous state.
  S.Current = FrameBefore;
  return false;
}
/// Opcode: perform a (possibly member) function call to \p Func, with
/// \p VarArgSize bytes of variadic arguments on the stack. Performs all
/// pre-call checks, sets up a new frame, and interprets the callee.
/// NOTE(review): some failure paths return plain false while others go
/// through cleanup() — presumably intentional depending on whether the
/// arguments must still be popped; confirm against cleanupAfterFunctionCall.
bool Call(InterpState &S, CodePtr OpPC, const Function *Func,
          uint32_t VarArgSize) {

  // C doesn't have constexpr functions.
  if (!S.getLangOpts().CPlusPlus)
    return Invalid(S, OpPC);

  assert(Func);
  // Shared failure path that removes the call arguments from the stack.
  auto cleanup = [&]() -> bool {
    cleanupAfterFunctionCall(S, OpPC, Func);
    return false;
  };

  if (Func->hasThisPointer()) {
    size_t ArgSize = Func->getArgSize() + VarArgSize;
    // With RVO, the return slot pointer sits below the 'this' pointer.
    size_t ThisOffset = ArgSize - (Func->hasRVO() ? primSize(Type: PT_Ptr) : 0);

    const Pointer &ThisPtr = S.Stk.peek<Pointer>(Offset: ThisOffset);

    // C++23 [expr.const]p5.6
    // an invocation of a virtual function ([class.virtual]) for an object whose
    // dynamic type is constexpr-unknown;
    if (ThisPtr.isDummy() && Func->isVirtual())
      return false;

    // If the current function is a lambda static invoker and
    // the function we're about to call is a lambda call operator,
    // skip the CheckInvoke, since the ThisPtr is a null pointer
    // anyway.
    if (S.Current->getFunction() &&
        S.Current->getFunction()->isLambdaStaticInvoker() &&
        Func->isLambdaCallOperator()) {
      assert(ThisPtr.isZero());
    } else {
      if (!CheckInvoke(S, OpPC, Ptr: ThisPtr))
        return cleanup();
      // Constructors/destructors legitimately run on inactive objects.
      if (!Func->isConstructor() && !Func->isDestructor() &&
          !CheckActive(S, OpPC, Ptr: ThisPtr, AK: AK_MemberCall))
        return false;
    }

    if (Func->isConstructor() && !checkConstructor(S, OpPC, Func, ThisPtr))
      return false;
    if (Func->isDestructor() && !CheckDestructor(S, OpPC, Ptr: ThisPtr))
      return false;

    // Track blocks under construction/destruction so virtual dispatch
    // can use the static type while they are in flight.
    if (Func->isConstructor() || Func->isDestructor())
      S.InitializingBlocks.push_back(Elt: ThisPtr.block());
  }

  // Bytecode for the callee may not exist yet; compile on demand.
  if (!Func->isFullyCompiled())
    compileFunction(S, Func);

  if (!CheckCallable(S, OpPC, F: Func))
    return cleanup();

  // Do not evaluate any function calls in checkingPotentialConstantExpression
  // mode. Constructors will be aborted later when their initializers are
  // evaluated.
  if (S.checkingPotentialConstantExpression() && !Func->isConstructor())
    return false;

  if (!CheckCallDepth(S, OpPC))
    return cleanup();

  auto NewFrame = std::make_unique<InterpFrame>(args&: S, args&: Func, args&: OpPC, args&: VarArgSize);
  InterpFrame *FrameBefore = S.Current;
  S.Current = NewFrame.get();

  // Immediate (consteval) callees force a constant context.
  InterpStateCCOverride CCOverride(S, Func->isImmediate());
  // Note that we cannot assert(CallResult.hasValue()) here since
  // Ret() above only sets the APValue if the curent frame doesn't
  // have a caller set.
  bool Success = Interpret(S);
  // Remove initializing block again.
  if (Func->isConstructor() || Func->isDestructor())
    S.InitializingBlocks.pop_back();

  if (!Success) {
    // Interpreting the function failed somehow. Reset to
    // previous state.
    S.Current = FrameBefore;
    return false;
  }

  NewFrame.release(); // Frame was delete'd already.
  assert(S.Current == FrameBefore);
  return true;
}
1771
/// Determine the dynamic class of the object at \p TypePtr for virtual
/// dispatch, writing it to \p DynamicDecl. Fails (with a diagnostic)
/// when the dynamic type is constexpr-unknown.
static bool GetDynamicDecl(InterpState &S, CodePtr OpPC, Pointer TypePtr,
                           const CXXRecordDecl *&DynamicDecl) {
  // Look at the most-derived object, not a base subobject.
  TypePtr = TypePtr.stripBaseCasts();

  QualType DynamicType = TypePtr.getType();
  if (TypePtr.isStatic() || TypePtr.isConst()) {
    // A non-constexpr variable has a constexpr-unknown dynamic type;
    // virtual dispatch through it cannot be evaluated.
    if (const VarDecl *VD = TypePtr.getDeclDesc()->asVarDecl();
        VD && !VD->isConstexpr()) {
      const Expr *E = S.Current->getExpr(PC: OpPC);
      APValue V = TypePtr.toAPValue(ASTCtx: S.getASTContext());
      QualType TT = S.getASTContext().getLValueReferenceType(T: DynamicType);
      S.FFDiag(E, DiagId: diag::note_constexpr_polymorphic_unknown_dynamic_type)
          << AccessKinds::AK_MemberCall << V.getAsString(Ctx: S.getASTContext(), Ty: TT);
      return false;
    }
  }

  // Peel off pointer/reference/array layers to reach the class decl.
  if (DynamicType->isPointerType() || DynamicType->isReferenceType()) {
    DynamicDecl = DynamicType->getPointeeCXXRecordDecl();
  } else if (DynamicType->isArrayType()) {
    const Type *ElemType = DynamicType->getPointeeOrArrayElementType();
    assert(ElemType);
    DynamicDecl = ElemType->getAsCXXRecordDecl();
  } else {
    DynamicDecl = DynamicType->getAsCXXRecordDecl();
  }
  return true;
}
1800
/// Opcode: perform a virtual call. Resolves the overrider for the
/// object's dynamic type, calls it via Call(), and applies the
/// covariant-return-type adjustment if needed.
bool CallVirt(InterpState &S, CodePtr OpPC, const Function *Func,
              uint32_t VarArgSize) {
  assert(Func->hasThisPointer());
  assert(Func->isVirtual());
  size_t ArgSize = Func->getArgSize() + VarArgSize;
  // With RVO, the return slot pointer sits below the 'this' pointer.
  size_t ThisOffset = ArgSize - (Func->hasRVO() ? primSize(Type: PT_Ptr) : 0);
  Pointer &ThisPtr = S.Stk.peek<Pointer>(Offset: ThisOffset);
  const FunctionDecl *Callee = Func->getDecl();

  const CXXRecordDecl *DynamicDecl = nullptr;
  if (!GetDynamicDecl(S, OpPC, TypePtr: ThisPtr, DynamicDecl))
    return false;
  assert(DynamicDecl);

  const auto *StaticDecl = cast<CXXRecordDecl>(Val: Func->getParentDecl());
  const auto *InitialFunction = cast<CXXMethodDecl>(Val: Callee);
  const CXXMethodDecl *Overrider;

  // While the object is under construction/destruction (tracked in
  // InitializingBlocks), dispatch uses the static type, per
  // [class.cdtor].
  if (StaticDecl != DynamicDecl &&
      !llvm::is_contained(Range&: S.InitializingBlocks, Element: ThisPtr.block())) {
    if (!DynamicDecl->isDerivedFrom(Base: StaticDecl))
      return false;
    Overrider = S.getContext().getOverridingFunction(DynamicDecl, StaticDecl,
                                                     InitialFunction);

  } else {
    Overrider = InitialFunction;
  }

  // C++2a [class.abstract]p6:
  //   the effect of making a virtual call to a pure virtual function [...] is
  //   undefined
  if (Overrider->isPureVirtual()) {
    S.FFDiag(SI: S.Current->getSource(PC: OpPC), DiagId: diag::note_constexpr_pure_virtual_call,
             ExtraNotes: 1)
        << Callee;
    S.Note(Loc: Callee->getLocation(), DiagId: diag::note_declared_at);
    return false;
  }

  if (Overrider != InitialFunction) {
    // DR1872: An instantiated virtual constexpr function can't be called in a
    // constant expression (prior to C++20). We can still constant-fold such a
    // call.
    if (!S.getLangOpts().CPlusPlus20 && Overrider->isVirtual()) {
      const Expr *E = S.Current->getExpr(PC: OpPC);
      S.CCEDiag(E, DiagId: diag::note_constexpr_virtual_call) << E->getSourceRange();
    }

    Func = S.getContext().getOrCreateFunction(FuncDecl: Overrider);

    const CXXRecordDecl *ThisFieldDecl =
        ThisPtr.getFieldDesc()->getType()->getAsCXXRecordDecl();
    if (Func->getParentDecl()->isDerivedFrom(Base: ThisFieldDecl)) {
      // If the function we call is further DOWN the hierarchy than the
      // FieldDesc of our pointer, just go up the hierarchy of this field
      // the furthest we can go.
      ThisPtr = ThisPtr.stripBaseCasts();
    }
  }

  if (!Call(S, OpPC, Func, VarArgSize))
    return false;

  // Covariant return types. The return type of Overrider is a pointer
  // or reference to a class type.
  if (Overrider != InitialFunction &&
      Overrider->getReturnType()->isPointerOrReferenceType() &&
      InitialFunction->getReturnType()->isPointerOrReferenceType()) {
    QualType OverriderPointeeType =
        Overrider->getReturnType()->getPointeeType();
    QualType InitialPointeeType =
        InitialFunction->getReturnType()->getPointeeType();
    // We've called Overrider above, but calling code expects us to return what
    // InitialFunction returned. According to the rules for covariant return
    // types, what InitialFunction returns needs to be a base class of what
    // Overrider returns. So, we need to do an upcast here.
    unsigned Offset = S.getContext().collectBaseOffset(
        BaseDecl: InitialPointeeType->getAsRecordDecl(),
        DerivedDecl: OverriderPointeeType->getAsRecordDecl());
    return GetPtrBasePop(S, OpPC, Off: Offset, /*IsNullOK=*/NullOK: true);
  }

  return true;
}
1886
1887bool CallBI(InterpState &S, CodePtr OpPC, const CallExpr *CE,
1888 uint32_t BuiltinID) {
1889 // A little arbitrary, but the current interpreter allows evaluation
1890 // of builtin functions in this mode, with some exceptions.
1891 if (BuiltinID == Builtin::BI__builtin_operator_new &&
1892 S.checkingPotentialConstantExpression())
1893 return false;
1894
1895 return InterpretBuiltin(S, OpPC, Call: CE, BuiltinID);
1896}
1897
/// Evaluates a call through a function pointer. Pops the function pointer
/// off the stack, validates it (null, type compatibility, nonnull
/// attributes, callability) and dispatches to Call()/CallVirt().
bool CallPtr(InterpState &S, CodePtr OpPC, uint32_t ArgSize,
             const CallExpr *CE) {
  const Pointer &Ptr = S.Stk.pop<Pointer>();

  // Calling a null function pointer is never a constant expression.
  if (Ptr.isZero()) {
    S.FFDiag(SI: S.Current->getSource(PC: OpPC), DiagId: diag::note_constexpr_null_callee)
        << const_cast<Expr *>(CE->getCallee()) << CE->getSourceRange();
    return false;
  }

  if (!Ptr.isFunctionPointer())
    return Invalid(S, OpPC);

  const FunctionPointer &FuncPtr = Ptr.asFunctionPointer();
  const Function *F = FuncPtr.getFunction();
  assert(F);
  // Don't allow calling block pointers.
  if (!F->getDecl())
    return Invalid(S, OpPC);

  // This happens when the call expression has been cast to
  // something else, but we don't support that.
  if (S.Ctx.classify(T: F->getDecl()->getReturnType()) !=
      S.Ctx.classify(T: CE->getCallReturnType(Ctx: S.getASTContext())))
    return false;

  // Check argument nullability state.
  if (F->hasNonNullAttr()) {
    if (!CheckNonNullArgs(S, OpPC, F, CE, ArgSize))
      return false;
  }

  // Can happen when casting function pointers around.
  QualType CalleeType = CE->getCallee()->getType();
  if (CalleeType->isPointerType() &&
      !S.getASTContext().hasSameFunctionTypeIgnoringExceptionSpec(
          T: F->getDecl()->getType(), U: CalleeType->getPointeeType())) {
    return false;
  }

  // We need to compile (and check) early for function pointer calls
  // because the Call/CallVirt below might access the instance pointer
  // but the Function's information about them is wrong.
  if (!F->isFullyCompiled())
    compileFunction(S, Func: F);

  if (!CheckCallable(S, OpPC, F))
    return false;

  // Anything beyond the declared parameters is vararg material.
  assert(ArgSize >= F->getWrittenArgSize());
  uint32_t VarArgSize = ArgSize - F->getWrittenArgSize();

  // We need to do this explicitly here since we don't have the necessary
  // information to do it automatically.
  if (F->isThisPointerExplicit())
    VarArgSize -= align(Size: primSize(Type: PT_Ptr));

  if (F->isVirtual())
    return CallVirt(S, OpPC, Func: F, VarArgSize);

  return Call(S, OpPC, Func: F, VarArgSize);
}
1960
1961static void startLifetimeRecurse(const Pointer &Ptr) {
1962 if (const Record *R = Ptr.getRecord()) {
1963 Ptr.startLifetime();
1964 for (const Record::Field &Fi : R->fields())
1965 startLifetimeRecurse(Ptr: Ptr.atField(Off: Fi.Offset));
1966 return;
1967 }
1968
1969 if (const Descriptor *FieldDesc = Ptr.getFieldDesc();
1970 FieldDesc->isCompositeArray()) {
1971 assert(Ptr.getLifetime() == Lifetime::Started);
1972 for (unsigned I = 0; I != FieldDesc->getNumElems(); ++I)
1973 startLifetimeRecurse(Ptr: Ptr.atIndex(Idx: I).narrow());
1974 return;
1975 }
1976
1977 Ptr.startLifetime();
1978}
1979
1980bool StartLifetime(InterpState &S, CodePtr OpPC) {
1981 const auto &Ptr = S.Stk.peek<Pointer>();
1982 if (Ptr.isBlockPointer() && !CheckDummy(S, OpPC, B: Ptr.block(), AK: AK_Destroy))
1983 return false;
1984 startLifetimeRecurse(Ptr: Ptr.narrow());
1985 return true;
1986}
1987
1988// FIXME: It might be better to the recursing as part of the generated code for
1989// a destructor?
1990static void endLifetimeRecurse(const Pointer &Ptr) {
1991 if (const Record *R = Ptr.getRecord()) {
1992 Ptr.endLifetime();
1993 for (const Record::Field &Fi : R->fields())
1994 endLifetimeRecurse(Ptr: Ptr.atField(Off: Fi.Offset));
1995 return;
1996 }
1997
1998 if (const Descriptor *FieldDesc = Ptr.getFieldDesc();
1999 FieldDesc->isCompositeArray()) {
2000 // No endLifetime() for array roots.
2001 assert(Ptr.getLifetime() == Lifetime::Started);
2002 for (unsigned I = 0; I != FieldDesc->getNumElems(); ++I)
2003 endLifetimeRecurse(Ptr: Ptr.atIndex(Idx: I).narrow());
2004 return;
2005 }
2006
2007 Ptr.endLifetime();
2008}
2009
2010/// Ends the lifetime of the peek'd pointer.
2011bool EndLifetime(InterpState &S, CodePtr OpPC) {
2012 const auto &Ptr = S.Stk.peek<Pointer>();
2013 if (Ptr.isBlockPointer() && !CheckDummy(S, OpPC, B: Ptr.block(), AK: AK_Destroy))
2014 return false;
2015
2016 endLifetimeRecurse(Ptr: Ptr.narrow());
2017 return true;
2018}
2019
2020/// Ends the lifetime of the pop'd pointer.
2021bool EndLifetimePop(InterpState &S, CodePtr OpPC) {
2022 const auto &Ptr = S.Stk.pop<Pointer>();
2023 if (Ptr.isBlockPointer() && !CheckDummy(S, OpPC, B: Ptr.block(), AK: AK_Destroy))
2024 return false;
2025
2026 endLifetimeRecurse(Ptr: Ptr.narrow());
2027 return true;
2028}
2029
/// Validates a placement-new into the peek'd pointer: checks the storage
/// is accessible/live/writable and that the allocated type and element
/// count fit the storage. \p ArraySize is the evaluated array bound for
/// array new, if any.
bool CheckNewTypeMismatch(InterpState &S, CodePtr OpPC, const Expr *E,
                          std::optional<uint64_t> ArraySize) {
  const Pointer &Ptr = S.Stk.peek<Pointer>();

  // Constructing directly into a union member activates it.
  if (Ptr.inUnion() && Ptr.getBase().getRecord()->isUnion())
    Ptr.activate();

  if (Ptr.isZero()) {
    S.FFDiag(SI: S.Current->getSource(PC: OpPC), DiagId: diag::note_constexpr_access_null)
        << AK_Construct;
    return false;
  }

  if (!Ptr.isBlockPointer())
    return false;

  if (!CheckRange(S, OpPC, Ptr, AK: AK_Construct))
    return false;

  // Construction (re)starts the lifetime of the storage and subobjects.
  startLifetimeRecurse(Ptr);

  // Similar to CheckStore(), but with the additional CheckTemporary() call and
  // the AccessKinds are different.
  if (!Ptr.block()->isAccessible()) {
    if (!CheckExtern(S, OpPC, Ptr))
      return false;
    if (!CheckLive(S, OpPC, Ptr, AK: AK_Construct))
      return false;
    return CheckDummy(S, OpPC, B: Ptr.block(), AK: AK_Construct);
  }
  if (!CheckTemporary(S, OpPC, B: Ptr.block(), AK: AK_Construct))
    return false;

  // CheckLifetime for this and all base pointers.
  for (Pointer P = Ptr;;) {
    if (!CheckLifetime(S, OpPC, LT: P.getLifetime(), AK: AK_Construct))
      return false;

    if (P.isRoot())
      break;
    P = P.getBase();
  }

  if (!CheckRange(S, OpPC, Ptr, AK: AK_Construct))
    return false;
  if (!CheckGlobal(S, OpPC, Ptr))
    return false;
  if (!CheckConst(S, OpPC, Ptr))
    return false;
  if (!S.inConstantContext() && isConstexprUnknown(P: Ptr))
    return false;

  // Reject non-constant-evaluation-usable operator new/delete, etc.
  if (!InvalidNewDeleteExpr(S, OpPC, E))
    return false;

  const auto *NewExpr = cast<CXXNewExpr>(Val: E);
  QualType StorageType = Ptr.getFieldDesc()->getDataType(Ctx: S.getASTContext());
  const ASTContext &ASTCtx = S.getASTContext();
  QualType AllocType;
  if (ArraySize) {
    // For array new, compare against a constant array of the evaluated
    // bound so element counts can be checked below.
    AllocType = ASTCtx.getConstantArrayType(
        EltTy: NewExpr->getAllocatedType(),
        ArySize: APInt(64, static_cast<uint64_t>(*ArraySize), false), SizeExpr: nullptr,
        ASM: ArraySizeModifier::Normal, IndexTypeQuals: 0);
  } else {
    AllocType = NewExpr->getAllocatedType();
  }

  // Element counts default to 1 for non-array types.
  unsigned StorageSize = 1;
  unsigned AllocSize = 1;
  if (const auto *CAT = dyn_cast<ConstantArrayType>(Val&: AllocType))
    AllocSize = CAT->getZExtSize();
  if (const auto *CAT = dyn_cast<ConstantArrayType>(Val&: StorageType))
    StorageSize = CAT->getZExtSize();

  // The allocation must fit the storage, and the element types must be
  // similar (cv-qualification differences are tolerated).
  if (AllocSize > StorageSize ||
      !ASTCtx.hasSimilarType(T1: ASTCtx.getBaseElementType(QT: AllocType),
                             T2: ASTCtx.getBaseElementType(QT: StorageType))) {
    S.FFDiag(Loc: S.Current->getLocation(PC: OpPC),
             DiagId: diag::note_constexpr_placement_new_wrong_type)
        << StorageType << AllocType;
    return false;
  }

  // Can't activate fields in a union, unless the direct base is the union.
  if (Ptr.inUnion() && !Ptr.isActive() && !Ptr.getBase().getRecord()->isUnion())
    return CheckActive(S, OpPC, Ptr, AK: AK_Construct);

  return true;
}
2120
2121bool InvalidNewDeleteExpr(InterpState &S, CodePtr OpPC, const Expr *E) {
2122 assert(E);
2123
2124 if (const auto *NewExpr = dyn_cast<CXXNewExpr>(Val: E)) {
2125 const FunctionDecl *OperatorNew = NewExpr->getOperatorNew();
2126
2127 if (NewExpr->getNumPlacementArgs() > 0) {
2128 // This is allowed pre-C++26, but only an std function or if
2129 // [[msvc::constexpr]] was used.
2130 if (S.getLangOpts().CPlusPlus26 || S.Current->isStdFunction() ||
2131 S.Current->MSVCConstexprAllowed)
2132 return true;
2133
2134 S.FFDiag(SI: S.Current->getSource(PC: OpPC), DiagId: diag::note_constexpr_new_placement)
2135 << /*C++26 feature*/ 1 << E->getSourceRange();
2136 } else if (
2137 !OperatorNew
2138 ->isUsableAsGlobalAllocationFunctionInConstantEvaluation()) {
2139 S.FFDiag(SI: S.Current->getSource(PC: OpPC),
2140 DiagId: diag::note_constexpr_new_non_replaceable)
2141 << isa<CXXMethodDecl>(Val: OperatorNew) << OperatorNew;
2142 return false;
2143 } else if (!S.getLangOpts().CPlusPlus26 &&
2144 NewExpr->getNumPlacementArgs() == 1 &&
2145 !OperatorNew->isReservedGlobalPlacementOperator()) {
2146 if (!S.getLangOpts().CPlusPlus26) {
2147 S.FFDiag(SI: S.Current->getSource(PC: OpPC), DiagId: diag::note_constexpr_new_placement)
2148 << /*Unsupported*/ 0 << E->getSourceRange();
2149 return false;
2150 }
2151 return true;
2152 }
2153 } else {
2154 const auto *DeleteExpr = cast<CXXDeleteExpr>(Val: E);
2155 const FunctionDecl *OperatorDelete = DeleteExpr->getOperatorDelete();
2156 if (!OperatorDelete
2157 ->isUsableAsGlobalAllocationFunctionInConstantEvaluation()) {
2158 S.FFDiag(SI: S.Current->getSource(PC: OpPC),
2159 DiagId: diag::note_constexpr_new_non_replaceable)
2160 << isa<CXXMethodDecl>(Val: OperatorDelete) << OperatorDelete;
2161 return false;
2162 }
2163 }
2164
2165 return false;
2166}
2167
2168bool handleFixedPointOverflow(InterpState &S, CodePtr OpPC,
2169 const FixedPoint &FP) {
2170 const Expr *E = S.Current->getExpr(PC: OpPC);
2171 if (S.checkingForUndefinedBehavior()) {
2172 S.getASTContext().getDiagnostics().Report(
2173 Loc: E->getExprLoc(), DiagID: diag::warn_fixedpoint_constant_overflow)
2174 << FP.toDiagnosticString(Ctx: S.getASTContext()) << E->getType();
2175 }
2176 S.CCEDiag(E, DiagId: diag::note_constexpr_overflow)
2177 << FP.toDiagnosticString(Ctx: S.getASTContext()) << E->getType();
2178 return S.noteUndefinedBehavior();
2179}
2180
2181bool InvalidShuffleVectorIndex(InterpState &S, CodePtr OpPC, uint32_t Index) {
2182 const SourceInfo &Loc = S.Current->getSource(PC: OpPC);
2183 S.FFDiag(SI: Loc,
2184 DiagId: diag::err_shufflevector_minus_one_is_undefined_behavior_constexpr)
2185 << Index;
2186 return false;
2187}
2188
2189bool CheckPointerToIntegralCast(InterpState &S, CodePtr OpPC,
2190 const Pointer &Ptr, unsigned BitWidth) {
2191 const SourceInfo &E = S.Current->getSource(PC: OpPC);
2192 S.CCEDiag(SI: E, DiagId: diag::note_constexpr_invalid_cast)
2193 << 2 << S.getLangOpts().CPlusPlus << S.Current->getRange(PC: OpPC);
2194
2195 if (Ptr.isDummy())
2196 return false;
2197 if (Ptr.isFunctionPointer())
2198 return true;
2199
2200 if (Ptr.isBlockPointer() && !Ptr.isZero()) {
2201 // Only allow based lvalue casts if they are lossless.
2202 if (S.getASTContext().getTargetInfo().getPointerWidth(AddrSpace: LangAS::Default) !=
2203 BitWidth)
2204 return Invalid(S, OpPC);
2205 }
2206 return true;
2207}
2208
2209bool CastPointerIntegralAP(InterpState &S, CodePtr OpPC, uint32_t BitWidth) {
2210 const Pointer &Ptr = S.Stk.pop<Pointer>();
2211
2212 if (!CheckPointerToIntegralCast(S, OpPC, Ptr, BitWidth))
2213 return false;
2214
2215 auto Result = S.allocAP<IntegralAP<false>>(BitWidth);
2216 Result.copy(V: APInt(BitWidth, Ptr.getIntegerRepresentation()));
2217
2218 S.Stk.push<IntegralAP<false>>(Args&: Result);
2219 return true;
2220}
2221
2222bool CastPointerIntegralAPS(InterpState &S, CodePtr OpPC, uint32_t BitWidth) {
2223 const Pointer &Ptr = S.Stk.pop<Pointer>();
2224
2225 if (!CheckPointerToIntegralCast(S, OpPC, Ptr, BitWidth))
2226 return false;
2227
2228 auto Result = S.allocAP<IntegralAP<true>>(BitWidth);
2229 Result.copy(V: APInt(BitWidth, Ptr.getIntegerRepresentation()));
2230
2231 S.Stk.push<IntegralAP<true>>(Args&: Result);
2232 return true;
2233}
2234
2235bool CheckBitCast(InterpState &S, CodePtr OpPC, bool HasIndeterminateBits,
2236 bool TargetIsUCharOrByte) {
2237 // This is always fine.
2238 if (!HasIndeterminateBits)
2239 return true;
2240
2241 // Indeterminate bits can only be bitcast to unsigned char or std::byte.
2242 if (TargetIsUCharOrByte)
2243 return true;
2244
2245 const Expr *E = S.Current->getExpr(PC: OpPC);
2246 QualType ExprType = E->getType();
2247 S.FFDiag(E, DiagId: diag::note_constexpr_bit_cast_indet_dest)
2248 << ExprType << S.getLangOpts().CharIsSigned << E->getSourceRange();
2249 return false;
2250}
2251
2252bool GetTypeid(InterpState &S, CodePtr OpPC, const Type *TypePtr,
2253 const Type *TypeInfoType) {
2254 S.Stk.push<Pointer>(Args&: TypePtr, Args&: TypeInfoType);
2255 return true;
2256}
2257
/// Implements typeid() on a polymorphic glvalue: pops the object pointer
/// and pushes a typeid pointer for its dynamic type.
bool GetTypeidPtr(InterpState &S, CodePtr OpPC, const Type *TypeInfoType) {
  const auto &P = S.Stk.pop<Pointer>();

  if (!P.isBlockPointer())
    return false;

  // Pick the most-derived type.
  CanQualType T = P.getDeclPtr().getType()->getCanonicalTypeUnqualified();
  // ... unless we're currently constructing this object.
  // FIXME: We have a similar check to this in more places.
  if (S.Current->getFunction()) {
    // Walk up the call stack; if some active constructor/destructor frame
    // operates on the same block, the dynamic type is that frame's class.
    for (const InterpFrame *Frame = S.Current; Frame; Frame = Frame->Caller) {
      if (const Function *Func = Frame->getFunction();
          Func && (Func->isConstructor() || Func->isDestructor()) &&
          P.block() == Frame->getThis().block()) {
        T = S.getContext().getASTContext().getCanonicalTagType(
            TD: Func->getParentDecl());
        break;
      }
    }
  }

  S.Stk.push<Pointer>(Args: T->getTypePtr(), Args&: TypeInfoType);
  return true;
}
2283
2284bool DiagTypeid(InterpState &S, CodePtr OpPC) {
2285 const auto *E = cast<CXXTypeidExpr>(Val: S.Current->getExpr(PC: OpPC));
2286 S.CCEDiag(E, DiagId: diag::note_constexpr_typeid_polymorphic)
2287 << E->getExprOperand()->getType()
2288 << E->getExprOperand()->getSourceRange();
2289 return false;
2290}
2291
/// Returns true if the two string-literal pointers could legally refer to
/// overlapping storage, i.e. if the shorter string (adjusted for the two
/// pointers' offsets) is a suffix-compatible prefix of the longer one,
/// including the implicit null terminator.
bool arePotentiallyOverlappingStringLiterals(const Pointer &LHS,
                                             const Pointer &RHS) {
  // Treat one-past-the-end pointers as pointing at the terminator.
  unsigned LHSOffset = LHS.isOnePastEnd() ? LHS.getNumElems() : LHS.getIndex();
  unsigned RHSOffset = RHS.isOnePastEnd() ? RHS.getNumElems() : RHS.getIndex();
  // Lengths in bytes, excluding the null terminator element.
  unsigned LHSLength = (LHS.getNumElems() - 1) * LHS.elemSize();
  unsigned RHSLength = (RHS.getNumElems() - 1) * RHS.elemSize();

  StringRef LHSStr((const char *)LHS.atIndex(Idx: 0).getRawAddress(), LHSLength);
  StringRef RHSStr((const char *)RHS.atIndex(Idx: 0).getRawAddress(), RHSLength);
  // Align the two views so both start at the same logical element; if the
  // offset difference exceeds either string, they cannot overlap.
  int32_t IndexDiff = RHSOffset - LHSOffset;
  if (IndexDiff < 0) {
    if (static_cast<int32_t>(LHSLength) < -IndexDiff)
      return false;
    LHSStr = LHSStr.drop_front(N: -IndexDiff);
  } else {
    if (static_cast<int32_t>(RHSLength) < IndexDiff)
      return false;
    RHSStr = RHSStr.drop_front(N: IndexDiff);
  }

  unsigned ShorterCharWidth;
  StringRef Shorter;
  StringRef Longer;
  if (LHSLength < RHSLength) {
    ShorterCharWidth = LHS.elemSize();
    Shorter = LHSStr;
    Longer = RHSStr;
  } else {
    ShorterCharWidth = RHS.elemSize();
    Shorter = RHSStr;
    Longer = LHSStr;
  }

  // The null terminator isn't included in the string data, so check for it
  // manually. If the longer string doesn't have a null terminator where the
  // shorter string ends, they aren't potentially overlapping.
  for (unsigned NullByte : llvm::seq(Size: ShorterCharWidth)) {
    if (Shorter.size() + NullByte >= Longer.size())
      break;
    if (Longer[Shorter.size() + NullByte])
      return false;
  }
  // Finally, the shorter string must be a prefix of the longer one.
  return Shorter == Longer.take_front(N: Shorter.size());
}
2336
2337static void copyPrimitiveMemory(InterpState &S, const Pointer &Ptr,
2338 PrimType T) {
2339
2340 if (T == PT_IntAPS) {
2341 auto &Val = Ptr.deref<IntegralAP<true>>();
2342 if (!Val.singleWord()) {
2343 uint64_t *NewMemory = new (S.P) uint64_t[Val.numWords()];
2344 Val.take(NewMemory);
2345 }
2346 } else if (T == PT_IntAP) {
2347 auto &Val = Ptr.deref<IntegralAP<false>>();
2348 if (!Val.singleWord()) {
2349 uint64_t *NewMemory = new (S.P) uint64_t[Val.numWords()];
2350 Val.take(NewMemory);
2351 }
2352 } else if (T == PT_Float) {
2353 auto &Val = Ptr.deref<Floating>();
2354 if (!Val.singleWord()) {
2355 uint64_t *NewMemory = new (S.P) uint64_t[Val.numWords()];
2356 Val.take(NewMemory);
2357 }
2358 }
2359}
2360
2361template <typename T>
2362static void copyPrimitiveMemory(InterpState &S, const Pointer &Ptr) {
2363 assert(needsAlloc<T>());
2364 auto &Val = Ptr.deref<T>();
2365 if (!Val.singleWord()) {
2366 uint64_t *NewMemory = new (S.P) uint64_t[Val.numWords()];
2367 Val.take(NewMemory);
2368 }
2369}
2370
2371static void finishGlobalRecurse(InterpState &S, const Pointer &Ptr) {
2372 if (const Record *R = Ptr.getRecord()) {
2373 for (const Record::Field &Fi : R->fields()) {
2374 if (Fi.Desc->isPrimitive()) {
2375 TYPE_SWITCH_ALLOC(Fi.Desc->getPrimType(), {
2376 copyPrimitiveMemory<T>(S, Ptr.atField(Fi.Offset));
2377 });
2378 copyPrimitiveMemory(S, Ptr: Ptr.atField(Off: Fi.Offset), T: Fi.Desc->getPrimType());
2379 } else
2380 finishGlobalRecurse(S, Ptr: Ptr.atField(Off: Fi.Offset));
2381 }
2382 return;
2383 }
2384
2385 if (const Descriptor *D = Ptr.getFieldDesc(); D && D->isArray()) {
2386 unsigned NumElems = D->getNumElems();
2387 if (NumElems == 0)
2388 return;
2389
2390 if (D->isPrimitiveArray()) {
2391 PrimType PT = D->getPrimType();
2392 if (!needsAlloc(T: PT))
2393 return;
2394 assert(NumElems >= 1);
2395 const Pointer EP = Ptr.atIndex(Idx: 0);
2396 bool AllSingleWord = true;
2397 TYPE_SWITCH_ALLOC(PT, {
2398 if (!EP.deref<T>().singleWord()) {
2399 copyPrimitiveMemory<T>(S, EP);
2400 AllSingleWord = false;
2401 }
2402 });
2403 if (AllSingleWord)
2404 return;
2405 for (unsigned I = 1; I != D->getNumElems(); ++I) {
2406 const Pointer EP = Ptr.atIndex(Idx: I);
2407 copyPrimitiveMemory(S, Ptr: EP, T: PT);
2408 }
2409 } else {
2410 assert(D->isCompositeArray());
2411 for (unsigned I = 0; I != D->getNumElems(); ++I) {
2412 const Pointer EP = Ptr.atIndex(Idx: I).narrow();
2413 finishGlobalRecurse(S, Ptr: EP);
2414 }
2415 }
2416 }
2417}
2418
2419bool FinishInitGlobal(InterpState &S, CodePtr OpPC) {
2420 const Pointer &Ptr = S.Stk.pop<Pointer>();
2421
2422 finishGlobalRecurse(S, Ptr);
2423 if (Ptr.canBeInitialized()) {
2424 Ptr.initialize();
2425 Ptr.activate();
2426 }
2427
2428 return true;
2429}
2430
2431bool InvalidCast(InterpState &S, CodePtr OpPC, CastKind Kind, bool Fatal) {
2432 const SourceLocation &Loc = S.Current->getLocation(PC: OpPC);
2433
2434 switch (Kind) {
2435 case CastKind::Reinterpret:
2436 S.CCEDiag(Loc, DiagId: diag::note_constexpr_invalid_cast)
2437 << diag::ConstexprInvalidCastKind::Reinterpret
2438 << S.Current->getRange(PC: OpPC);
2439 return !Fatal;
2440 case CastKind::ReinterpretLike:
2441 S.CCEDiag(Loc, DiagId: diag::note_constexpr_invalid_cast)
2442 << diag::ConstexprInvalidCastKind::ThisConversionOrReinterpret
2443 << S.getLangOpts().CPlusPlus << S.Current->getRange(PC: OpPC);
2444 return !Fatal;
2445 case CastKind::Volatile:
2446 if (!S.checkingPotentialConstantExpression()) {
2447 const auto *E = cast<CastExpr>(Val: S.Current->getExpr(PC: OpPC));
2448 if (S.getLangOpts().CPlusPlus)
2449 S.FFDiag(E, DiagId: diag::note_constexpr_access_volatile_type)
2450 << AK_Read << E->getSubExpr()->getType();
2451 else
2452 S.FFDiag(E);
2453 }
2454
2455 return false;
2456 case CastKind::Dynamic:
2457 assert(!S.getLangOpts().CPlusPlus20);
2458 S.CCEDiag(Loc, DiagId: diag::note_constexpr_invalid_cast)
2459 << diag::ConstexprInvalidCastKind::Dynamic;
2460 return true;
2461 }
2462 llvm_unreachable("Unhandled CastKind");
2463 return false;
2464}
2465
/// Destroys the locals of scope \p I in the current frame. Fails if any
/// initialized local in the scope has already had its lifetime explicitly
/// ended (destroying an out-of-lifetime object is not constant).
bool Destroy(InterpState &S, CodePtr OpPC, uint32_t I) {
  assert(S.Current->getFunction());
  // FIXME: We iterate the scope once here and then again in the destroy() call
  // below.
  for (auto &Local : S.Current->getFunction()->getScope(Idx: I).locals_reverse()) {
    // Uninitialized locals have nothing to destroy.
    if (!S.Current->getLocalBlock(Offset: Local.Offset)->isInitialized())
      continue;
    const Pointer &Ptr = S.Current->getLocalPointer(Offset: Local.Offset);
    if (Ptr.getLifetime() == Lifetime::Ended) {
      // Try to use the declaration for better diagnostics
      if (const Decl *D = Ptr.getDeclDesc()->asDecl()) {
        auto *ND = cast<NamedDecl>(Val: D);
        S.FFDiag(Loc: ND->getLocation(),
                 DiagId: diag::note_constexpr_destroy_out_of_lifetime)
            << ND->getNameAsString();
      } else {
        // No declaration available; describe the pointer textually instead.
        S.FFDiag(Loc: Ptr.getDeclDesc()->getLocation(),
                 DiagId: diag::note_constexpr_destroy_out_of_lifetime)
            << Ptr.toDiagnosticString(Ctx: S.getASTContext());
      }
      return false;
    }
  }

  S.Current->destroy(Idx: I);
  return true;
}
2493
2494// https://github.com/llvm/llvm-project/issues/102513
2495#if defined(_MSC_VER) && !defined(__clang__) && !defined(NDEBUG)
2496#pragma optimize("", off)
2497#endif
/// Main interpreter loop: reads and dispatches opcodes until one of the
/// generated op handlers returns (successful completion of the start
/// frame, or evaluation failure).
bool Interpret(InterpState &S) {
  // The current stack frame when we started Interpret().
  // This is being used by the ops to determine whether
  // to return from this function and thus terminate
  // interpretation.
  const InterpFrame *StartFrame = S.Current;
  assert(!S.Current->isRoot());
  CodePtr PC = S.Current->getPC();

  // Empty program.
  if (!PC)
    return true;

  for (;;) {
    auto Op = PC.read<Opcode>();
    CodePtr OpPC = PC;

    // The case labels/handlers are generated into Opcodes.inc; each one
    // either continues this loop or returns from Interpret().
    switch (Op) {
#define GET_INTERP
#include "Opcodes.inc"
#undef GET_INTERP
    }
  }
}
2522// https://github.com/llvm/llvm-project/issues/102513
2523#if defined(_MSC_VER) && !defined(__clang__) && !defined(NDEBUG)
2524#pragma optimize("", on)
2525#endif
2526
2527} // namespace interp
2528} // namespace clang
2529