1//===--- Program.cpp - Bytecode for the constexpr VM ------------*- C++ -*-===//
2//
3// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
4// See https://llvm.org/LICENSE.txt for license information.
5// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
6//
7//===----------------------------------------------------------------------===//
8
9#include "Program.h"
10#include "Context.h"
11#include "Function.h"
12#include "Integral.h"
13#include "PrimType.h"
14#include "clang/AST/Decl.h"
15#include "clang/AST/DeclCXX.h"
16
17using namespace clang;
18using namespace clang::interp;
19
20unsigned Program::getOrCreateNativePointer(const void *Ptr) {
21 auto [It, Inserted] =
22 NativePointerIndices.try_emplace(Key: Ptr, Args: NativePointers.size());
23 if (Inserted)
24 NativePointers.push_back(x: Ptr);
25
26 return It->second;
27}
28
/// Returns the native pointer previously interned at index \p Idx by
/// getOrCreateNativePointer().
const void *Program::getNativePointer(unsigned Idx) {
  return NativePointers[Idx];
}
32
/// Creates a global backing a string literal \p S and returns its index in
/// the globals table. \p Base (defaulting to \p S itself) becomes the
/// source expression attached to the descriptor.
unsigned Program::createGlobalString(const StringLiteral *S, const Expr *Base) {
  const size_t CharWidth = S->getCharByteWidth();
  const size_t BitWidth = CharWidth * Ctx.getCharBit();
  unsigned StringLength = S->getLength();

  // Map the character byte width onto the interpreter's primitive type.
  PrimType CharType;
  switch (CharWidth) {
  case 1:
    CharType = PT_Sint8;
    break;
  case 2:
    CharType = PT_Uint16;
    break;
  case 4:
    CharType = PT_Uint32;
    break;
  default:
    llvm_unreachable("unsupported character width");
  }

  if (!Base)
    Base = S;

  // Create a descriptor for the string. The array has one extra element
  // for the null terminator (StringLength + 1).
  Descriptor *Desc =
      allocateDescriptor(Args&: Base, Args&: CharType, Args: Descriptor::GlobalMD, Args: StringLength + 1,
                         /*isConst=*/Args: true,
                         /*isTemporary=*/Args: false,
                         /*isMutable=*/Args: false);

  // Allocate storage for the string.
  // The byte length does not include the null terminator.
  unsigned GlobalIndex = Globals.size();
  unsigned Sz = Desc->getAllocSize();
  auto *G = new (Allocator, Sz) Global(Ctx.getEvalID(), Desc, /*isStatic=*/true,
                                       /*isExtern=*/false);
  G->block()->invokeCtor();

  // String globals are created fully initialized.
  new (G->block()->rawData())
      GlobalInlineDescriptor{.InitState: GlobalInitState::Initialized};
  Globals.push_back(x: G);

  const Pointer Ptr(G->block());
  if (CharWidth == 1) {
    // Fast path for 1-byte character strings: copy the bytes wholesale.
    // NOTE(review): only StringLength bytes are copied; the terminator
    // presumably comes from zero-initialized block storage — confirm.
    std::memcpy(dest: &Ptr.atIndex(Idx: 0).deref<char>(), src: S->getString().data(),
                n: StringLength);
  } else {
    // Construct the string in storage one code unit at a time, including
    // the null terminator (iteration I == StringLength writes 0).
    for (unsigned I = 0; I <= StringLength; ++I) {
      Pointer Field = Ptr.atIndex(Idx: I);
      const uint32_t CodePoint = I == StringLength ? 0 : S->getCodeUnit(i: I);
      switch (CharType) {
      // Effectively unreachable here: CharWidth == 1 implies PT_Sint8 and
      // is handled by the memcpy fast path above.
      case PT_Sint8: {
        using T = PrimConv<PT_Sint8>::T;
        Field.deref<T>() = T::from(Value: CodePoint, NumBits: BitWidth);
        break;
      }
      case PT_Uint16: {
        using T = PrimConv<PT_Uint16>::T;
        Field.deref<T>() = T::from(Value: CodePoint, NumBits: BitWidth);
        break;
      }
      case PT_Uint32: {
        using T = PrimConv<PT_Uint32>::T;
        Field.deref<T>() = T::from(Value: CodePoint, NumBits: BitWidth);
        break;
      }
      default:
        llvm_unreachable("unsupported character type");
      }
    }
  }
  // Mark the whole array as initialized.
  Ptr.initialize();

  return GlobalIndex;
}
109
110Pointer Program::getPtrGlobal(unsigned Idx) const {
111 assert(Idx < Globals.size());
112 return Pointer(Globals[Idx]->block());
113}
114
/// Looks up the global index registered for \p VD. If \p VD itself is not
/// mapped, the redeclaration chain is searched for an already-evaluated
/// previous declaration.
std::optional<unsigned> Program::getGlobal(const ValueDecl *VD) {
  if (auto It = GlobalIndices.find(Val: VD); It != GlobalIndices.end())
    return It->second;

  // Find any previous declarations which were already evaluated.
  std::optional<unsigned> Index;
  for (const Decl *P = VD->getPreviousDecl(); P; P = P->getPreviousDecl()) {
    if (auto It = GlobalIndices.find(Val: P); It != GlobalIndices.end()) {
      Index = It->second;
      break;
    }
  }

  // Map the decl to the existing index.
  if (Index)
    GlobalIndices[VD] = *Index;

  // NOTE(review): std::nullopt is returned even when an index from a
  // previous declaration was found and cached above; only a subsequent
  // call with the same decl hits the fast path. Confirm this two-step
  // behavior is intended rather than `return Index;`.
  return std::nullopt;
}
134
135std::optional<unsigned> Program::getGlobal(const Expr *E) {
136 if (auto It = GlobalIndices.find(Val: E); It != GlobalIndices.end())
137 return It->second;
138 return std::nullopt;
139}
140
141std::optional<unsigned> Program::getOrCreateGlobal(const ValueDecl *VD,
142 const Expr *Init) {
143 if (auto Idx = getGlobal(VD))
144 return Idx;
145
146 if (auto Idx = createGlobal(VD, Init)) {
147 GlobalIndices[VD] = *Idx;
148 return Idx;
149 }
150 return std::nullopt;
151}
152
/// Returns the index of a dummy global block for \p D, creating and
/// caching one on demand. \p D may hold either an expression or a decl.
unsigned Program::getOrCreateDummy(const DeclTy &D) {
  assert(D);
  // Dedup blocks since they are immutable and pointers cannot be compared.
  if (auto It = DummyVariables.find(Val: D.getOpaqueValue());
      It != DummyVariables.end())
    return It->second;

  // Determine the type (and weakness, for decls) to base the dummy on.
  QualType QT;
  bool IsWeak = false;
  if (const auto *E = dyn_cast<const Expr *>(Val: D)) {
    QT = E->getType();
  } else {
    const auto *VD = cast<ValueDecl>(Val: cast<const Decl *>(Val: D));
    IsWeak = VD->isWeak();
    QT = VD->getType();
    // For references, build the dummy for the referenced type.
    if (const auto *RT = QT->getAs<ReferenceType>())
      QT = RT->getPointeeType();
  }
  assert(!QT.isNull());

  // Prefer a real (primitive or composite) descriptor; fall back to a
  // bare source-only descriptor if neither can be created.
  Descriptor *Desc;
  if (std::optional<PrimType> T = Ctx.classify(T: QT))
    Desc = createDescriptor(D, T: *T, /*SourceTy=*/nullptr, MDSize: std::nullopt,
                            /*IsConst=*/QT.isConstQualified());
  else
    Desc = createDescriptor(D, Ty: QT.getTypePtr(), MDSize: std::nullopt,
                            /*IsConst=*/QT.isConstQualified());
  if (!Desc)
    Desc = allocateDescriptor(Args: D);

  assert(Desc);
  Desc->makeDummy();

  assert(Desc->isDummy());

  // Allocate a block for storage.
  unsigned I = Globals.size();

  auto *G = new (Allocator, Desc->getAllocSize())
      Global(Ctx.getEvalID(), getCurrentDecl(), Desc, /*IsStatic=*/true,
             /*IsExtern=*/false, IsWeak);
  G->block()->invokeCtor();

  // Register the block and cache it for future lookups of the same key.
  Globals.push_back(x: G);
  DummyVariables[D.getOpaqueValue()] = I;
  return I;
}
200
/// Creates a global for \p VD initialized from \p Init and registers the
/// resulting index for \p VD and every previous declaration of the same
/// entity. Returns std::nullopt if the global could not be created.
std::optional<unsigned> Program::createGlobal(const ValueDecl *VD,
                                              const Expr *Init) {
  // Classify the declaration's storage.
  bool IsStatic, IsExtern;
  bool IsWeak = VD->isWeak();
  if (const auto *Var = dyn_cast<VarDecl>(Val: VD)) {
    IsStatic = Context::shouldBeGloballyIndexed(VD);
    IsExtern = Var->hasExternalStorage();
  } else if (isa<UnnamedGlobalConstantDecl, MSGuidDecl,
                 TemplateParamObjectDecl>(Val: VD)) {
    // Compiler-synthesized constant objects: always static, never extern.
    IsStatic = true;
    IsExtern = false;
  } else {
    IsStatic = false;
    IsExtern = true;
  }

  // Register all previous declarations as well. For extern blocks, just replace
  // the index with the new variable.
  if (auto Idx =
          createGlobal(D: VD, Ty: VD->getType(), IsStatic, IsExtern, IsWeak, Init)) {
    for (const Decl *P = VD; P; P = P->getPreviousDecl()) {
      unsigned &PIdx = GlobalIndices[P];
      if (P != VD) {
        // A previously created extern block is superseded by the new global.
        if (Globals[PIdx]->block()->isExtern())
          Globals[PIdx] = Globals[*Idx];
      }
      PIdx = *Idx;
    }
    return *Idx;
  }
  return std::nullopt;
}
233
234std::optional<unsigned> Program::createGlobal(const Expr *E) {
235 if (auto Idx = getGlobal(E))
236 return Idx;
237 if (auto Idx = createGlobal(D: E, Ty: E->getType(), /*isStatic=*/IsStatic: true,
238 /*isExtern=*/IsExtern: false, /*IsWeak=*/false)) {
239 GlobalIndices[E] = *Idx;
240 return *Idx;
241 }
242 return std::nullopt;
243}
244
/// Creates a global block for \p D of type \p Ty and returns its index in
/// the globals table, or std::nullopt if no descriptor could be created.
/// A null \p Init marks the global's init state as NoInitializer.
std::optional<unsigned> Program::createGlobal(const DeclTy &D, QualType Ty,
                                              bool IsStatic, bool IsExtern,
                                              bool IsWeak, const Expr *Init) {
  // Create a descriptor for the global.
  Descriptor *Desc;
  const bool IsConst = Ty.isConstQualified();
  // Globals backed by an expression (rather than a decl) are temporaries.
  const bool IsTemporary = D.dyn_cast<const Expr *>();
  const bool IsVolatile = Ty.isVolatileQualified();
  if (std::optional<PrimType> T = Ctx.classify(T: Ty))
    Desc = createDescriptor(D, T: *T, SourceTy: nullptr, MDSize: Descriptor::GlobalMD, IsConst,
                            IsTemporary, /*IsMutable=*/false, IsVolatile);
  else
    Desc = createDescriptor(D, Ty: Ty.getTypePtr(), MDSize: Descriptor::GlobalMD, IsConst,
                            IsTemporary, /*IsMutable=*/false, IsVolatile);

  if (!Desc)
    return std::nullopt;

  // Allocate a block for storage.
  unsigned I = Globals.size();

  auto *G = new (Allocator, Desc->getAllocSize()) Global(
      Ctx.getEvalID(), getCurrentDecl(), Desc, IsStatic, IsExtern, IsWeak);
  G->block()->invokeCtor();

  // Initialize InlineDescriptor fields.
  auto *GD = new (G->block()->rawData()) GlobalInlineDescriptor();
  if (!Init)
    GD->InitState = GlobalInitState::NoInitializer;
  Globals.push_back(x: G);

  return I;
}
278
279Function *Program::getFunction(const FunctionDecl *F) {
280 F = F->getCanonicalDecl();
281 assert(F);
282 auto It = Funcs.find(Val: F);
283 return It == Funcs.end() ? nullptr : It->second.get();
284}
285
/// Returns the interpreter's Record for \p RD, building it (bases, virtual
/// bases, fields and their layout offsets) on first request. Returns
/// nullptr when \p RD has no complete definition or a sub-descriptor
/// cannot be created.
Record *Program::getOrCreateRecord(const RecordDecl *RD) {
  // Use the actual definition as a key.
  RD = RD->getDefinition();
  if (!RD)
    return nullptr;

  if (!RD->isCompleteDefinition())
    return nullptr;

  // Return an existing record if available. Otherwise, we insert nullptr now
  // and replace that later, so recursive calls to this function with the same
  // RecordDecl don't run into infinite recursion.
  auto [It, Inserted] = Records.try_emplace(Key: RD);
  if (!Inserted)
    return It->second;

  // Number of bytes required by fields and base classes.
  unsigned BaseSize = 0;
  // Number of bytes required by virtual base.
  unsigned VirtSize = 0;

  // Helper to get a base descriptor.
  auto GetBaseDesc = [this](const RecordDecl *BD,
                            const Record *BR) -> const Descriptor * {
    if (!BR)
      return nullptr;
    return allocateDescriptor(Args&: BD, Args&: BR, Args: std::nullopt, /*isConst=*/Args: false,
                              /*isTemporary=*/Args: false,
                              /*isMutable=*/Args: false, /*IsVolatile=*/Args: false);
  };

  // Reserve space for base classes.
  Record::BaseList Bases;
  Record::VirtualBaseList VirtBases;
  if (const auto *CD = dyn_cast<CXXRecordDecl>(Val: RD)) {
    // Non-virtual bases first; virtual bases are handled below.
    for (const CXXBaseSpecifier &Spec : CD->bases()) {
      if (Spec.isVirtual())
        continue;

      // In error cases, the base might not be a RecordType.
      const auto *RT = Spec.getType()->getAs<RecordType>();
      if (!RT)
        return nullptr;
      const RecordDecl *BD = RT->getDecl();
      const Record *BR = getOrCreateRecord(RD: BD);

      const Descriptor *Desc = GetBaseDesc(BD, BR);
      if (!Desc)
        return nullptr;

      // Each base is preceded by an InlineDescriptor; the recorded offset
      // points past it, at the base's storage.
      BaseSize += align(Size: sizeof(InlineDescriptor));
      Bases.push_back(Elt: {.Decl: BD, .Offset: BaseSize, .Desc: Desc, .R: BR});
      BaseSize += align(Size: BR->getSize());
    }

    for (const CXXBaseSpecifier &Spec : CD->vbases()) {
      const auto *RT = Spec.getType()->getAs<RecordType>();
      if (!RT)
        return nullptr;

      const RecordDecl *BD = RT->getDecl();
      const Record *BR = getOrCreateRecord(RD: BD);

      const Descriptor *Desc = GetBaseDesc(BD, BR);
      if (!Desc)
        return nullptr;

      // Virtual bases are laid out in their own region, tracked by VirtSize.
      VirtSize += align(Size: sizeof(InlineDescriptor));
      VirtBases.push_back(Elt: {.Decl: BD, .Offset: VirtSize, .Desc: Desc, .R: BR});
      VirtSize += align(Size: BR->getSize());
    }
  }

  // Reserve space for fields.
  Record::FieldList Fields;
  for (const FieldDecl *FD : RD->fields()) {
    FD = FD->getFirstDecl();
    // Note that we DO create fields and descriptors
    // for unnamed bitfields here, even though we later ignore
    // them everywhere. That's so the FieldDecl's getFieldIndex() matches.

    // Reserve space for the field's descriptor and the offset.
    BaseSize += align(Size: sizeof(InlineDescriptor));

    // Classify the field and add its metadata.
    QualType FT = FD->getType();
    const bool IsConst = FT.isConstQualified();
    const bool IsMutable = FD->isMutable();
    const bool IsVolatile = FT.isVolatileQualified();
    const Descriptor *Desc;
    if (std::optional<PrimType> T = Ctx.classify(T: FT)) {
      // Primitive field.
      Desc = createDescriptor(D: FD, T: *T, SourceTy: nullptr, MDSize: std::nullopt, IsConst,
                              /*isTemporary=*/IsTemporary: false, IsMutable, IsVolatile);
    } else {
      // Composite field (record, array, complex, vector, ...).
      Desc = createDescriptor(D: FD, Ty: FT.getTypePtr(), MDSize: std::nullopt, IsConst,
                              /*isTemporary=*/IsTemporary: false, IsMutable, IsVolatile);
    }
    if (!Desc)
      return nullptr;
    Fields.push_back(Elt: {.Decl: FD, .Offset: BaseSize, .Desc: Desc});
    BaseSize += align(Size: Desc->getAllocSize());
  }

  // Replace the nullptr placeholder inserted by try_emplace above.
  Record *R = new (Allocator) Record(RD, std::move(Bases), std::move(Fields),
                                     std::move(VirtBases), VirtSize, BaseSize);
  Records[RD] = R;
  return R;
}
394
/// Creates a descriptor for a composite type \p Ty (records, arrays,
/// atomics, complex and vector types), recursing into element/inner types
/// as needed. Returns nullptr for unsupported types or on overflow.
Descriptor *Program::createDescriptor(const DeclTy &D, const Type *Ty,
                                      Descriptor::MetadataSize MDSize,
                                      bool IsConst, bool IsTemporary,
                                      bool IsMutable, bool IsVolatile,
                                      const Expr *Init) {

  // Classes and structures.
  if (const auto *RT = Ty->getAs<RecordType>()) {
    if (const auto *Record = getOrCreateRecord(RD: RT->getDecl()))
      return allocateDescriptor(Args: D, Args&: Record, Args&: MDSize, Args&: IsConst, Args&: IsTemporary,
                                Args&: IsMutable, Args&: IsVolatile);
    // Record creation failed (e.g. incomplete type): fall back to a
    // metadata-only descriptor.
    return allocateDescriptor(Args: D, Args&: MDSize);
  }

  // Arrays.
  if (const auto *ArrayType = Ty->getAsArrayTypeUnsafe()) {
    QualType ElemTy = ArrayType->getElementType();
    // Array of well-known bounds.
    if (const auto *CAT = dyn_cast<ConstantArrayType>(Val: ArrayType)) {
      size_t NumElems = CAT->getZExtSize();
      if (std::optional<PrimType> T = Ctx.classify(T: ElemTy)) {
        // Arrays of primitives.
        unsigned ElemSize = primSize(Type: *T);
        // Guard against unsigned overflow of ElemSize * NumElems.
        // `return {};` is equivalent to returning nullptr here.
        if (std::numeric_limits<unsigned>::max() / ElemSize <= NumElems) {
          return {};
        }
        return allocateDescriptor(Args: D, Args&: *T, Args&: MDSize, Args&: NumElems, Args&: IsConst, Args&: IsTemporary,
                                  Args&: IsMutable);
      } else {
        // Arrays of composites. In this case, the array is a list of pointers,
        // followed by the actual elements.
        const Descriptor *ElemDesc = createDescriptor(
            D, Ty: ElemTy.getTypePtr(), MDSize: std::nullopt, IsConst, IsTemporary);
        if (!ElemDesc)
          return nullptr;
        // Each element carries an InlineDescriptor in addition to its storage.
        unsigned ElemSize = ElemDesc->getAllocSize() + sizeof(InlineDescriptor);
        if (std::numeric_limits<unsigned>::max() / ElemSize <= NumElems)
          return {};
        return allocateDescriptor(Args: D, Args&: Ty, Args&: ElemDesc, Args&: MDSize, Args&: NumElems, Args&: IsConst,
                                  Args&: IsTemporary, Args&: IsMutable);
      }
    }

    // Array of unknown bounds - cannot be accessed and pointer arithmetic
    // is forbidden on pointers to such objects.
    if (isa<IncompleteArrayType>(Val: ArrayType) ||
        isa<VariableArrayType>(Val: ArrayType)) {
      if (std::optional<PrimType> T = Ctx.classify(T: ElemTy)) {
        // Unknown-size array of primitives.
        return allocateDescriptor(Args: D, Args&: *T, Args&: MDSize, Args&: IsConst, Args&: IsTemporary,
                                  Args: Descriptor::UnknownSize{});
      } else {
        // Unknown-size array of composites.
        const Descriptor *Desc = createDescriptor(
            D, Ty: ElemTy.getTypePtr(), MDSize: std::nullopt, IsConst, IsTemporary);
        if (!Desc)
          return nullptr;
        return allocateDescriptor(Args: D, Args&: Desc, Args&: MDSize, Args&: IsTemporary,
                                  Args: Descriptor::UnknownSize{});
      }
    }
  }

  // Atomic types. Descriptors are created for the underlying value type.
  if (const auto *AT = Ty->getAs<AtomicType>()) {
    const Type *InnerTy = AT->getValueType().getTypePtr();
    return createDescriptor(D, Ty: InnerTy, MDSize, IsConst, IsTemporary,
                            IsMutable);
  }

  // Complex types - represented as arrays of elements (real, imaginary).
  if (const auto *CT = Ty->getAs<ComplexType>()) {
    std::optional<PrimType> ElemTy = Ctx.classify(T: CT->getElementType());
    if (!ElemTy)
      return nullptr;

    return allocateDescriptor(Args: D, Args&: *ElemTy, Args&: MDSize, Args: 2, Args&: IsConst, Args&: IsTemporary,
                              Args&: IsMutable);
  }

  // Same with vector types.
  if (const auto *VT = Ty->getAs<VectorType>()) {
    std::optional<PrimType> ElemTy = Ctx.classify(T: VT->getElementType());
    if (!ElemTy)
      return nullptr;

    return allocateDescriptor(Args: D, Args&: *ElemTy, Args&: MDSize, Args: VT->getNumElements(), Args&: IsConst,
                              Args&: IsTemporary, Args&: IsMutable);
  }

  // Unsupported type.
  return nullptr;
}
485