1 | //===--- Pointer.cpp - Types for the constexpr VM ---------------*- C++ -*-===// |
2 | // |
3 | // Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions. |
4 | // See https://llvm.org/LICENSE.txt for license information. |
5 | // SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception |
6 | // |
7 | //===----------------------------------------------------------------------===// |
8 | |
9 | #include "Pointer.h" |
10 | #include "Boolean.h" |
11 | #include "Context.h" |
12 | #include "Floating.h" |
13 | #include "Function.h" |
14 | #include "Integral.h" |
15 | #include "InterpBlock.h" |
16 | #include "MemberPointer.h" |
17 | #include "PrimType.h" |
18 | #include "Record.h" |
19 | #include "clang/AST/RecordLayout.h" |
20 | |
21 | using namespace clang; |
22 | using namespace clang::interp; |
23 | |
/// Construct a block pointer to the start of the block's data: both the base
/// and the offset are placed just past the block's metadata region.
Pointer::Pointer(Block *Pointee)
    : Pointer(Pointee, Pointee->getDescriptor()->getMetadataSize(),
              Pointee->getDescriptor()->getMetadataSize()) {}
27 | |
/// Construct a block pointer whose base and offset are the same value.
Pointer::Pointer(Block *Pointee, uint64_t BaseAndOffset)
    : Pointer(Pointee, BaseAndOffset, BaseAndOffset) {}
30 | |
31 | Pointer::Pointer(const Pointer &P) |
32 | : Offset(P.Offset), PointeeStorage(P.PointeeStorage), |
33 | StorageKind(P.StorageKind) { |
34 | |
35 | if (isBlockPointer() && PointeeStorage.BS.Pointee) |
36 | PointeeStorage.BS.Pointee->addPointer(P: this); |
37 | } |
38 | |
39 | Pointer::Pointer(Block *Pointee, unsigned Base, uint64_t Offset) |
40 | : Offset(Offset), StorageKind(Storage::Block) { |
41 | assert((Base == RootPtrMark || Base % alignof(void *) == 0) && "wrong base" ); |
42 | |
43 | PointeeStorage.BS = {.Pointee: Pointee, .Base: Base}; |
44 | |
45 | if (Pointee) |
46 | Pointee->addPointer(P: this); |
47 | } |
48 | |
49 | Pointer::Pointer(Pointer &&P) |
50 | : Offset(P.Offset), PointeeStorage(P.PointeeStorage), |
51 | StorageKind(P.StorageKind) { |
52 | |
53 | if (StorageKind == Storage::Block && PointeeStorage.BS.Pointee) |
54 | PointeeStorage.BS.Pointee->replacePointer(Old: &P, New: this); |
55 | } |
56 | |
57 | Pointer::~Pointer() { |
58 | if (isIntegralPointer()) |
59 | return; |
60 | |
61 | if (Block *Pointee = PointeeStorage.BS.Pointee) { |
62 | Pointee->removePointer(P: this); |
63 | Pointee->cleanup(); |
64 | } |
65 | } |
66 | |
67 | void Pointer::operator=(const Pointer &P) { |
68 | // If the current storage type is Block, we need to remove |
69 | // this pointer from the block. |
70 | bool WasBlockPointer = isBlockPointer(); |
71 | if (StorageKind == Storage::Block) { |
72 | Block *Old = PointeeStorage.BS.Pointee; |
73 | if (WasBlockPointer && Old) { |
74 | PointeeStorage.BS.Pointee->removePointer(P: this); |
75 | Old->cleanup(); |
76 | } |
77 | } |
78 | |
79 | StorageKind = P.StorageKind; |
80 | Offset = P.Offset; |
81 | |
82 | if (P.isBlockPointer()) { |
83 | PointeeStorage.BS = P.PointeeStorage.BS; |
84 | PointeeStorage.BS.Pointee = P.PointeeStorage.BS.Pointee; |
85 | |
86 | if (PointeeStorage.BS.Pointee) |
87 | PointeeStorage.BS.Pointee->addPointer(P: this); |
88 | } else if (P.isIntegralPointer()) { |
89 | PointeeStorage.Int = P.PointeeStorage.Int; |
90 | } else { |
91 | assert(false && "Unhandled storage kind" ); |
92 | } |
93 | } |
94 | |
95 | void Pointer::operator=(Pointer &&P) { |
96 | // If the current storage type is Block, we need to remove |
97 | // this pointer from the block. |
98 | bool WasBlockPointer = isBlockPointer(); |
99 | if (StorageKind == Storage::Block) { |
100 | Block *Old = PointeeStorage.BS.Pointee; |
101 | if (WasBlockPointer && Old) { |
102 | PointeeStorage.BS.Pointee->removePointer(P: this); |
103 | Old->cleanup(); |
104 | } |
105 | } |
106 | |
107 | StorageKind = P.StorageKind; |
108 | Offset = P.Offset; |
109 | |
110 | if (P.isBlockPointer()) { |
111 | PointeeStorage.BS = P.PointeeStorage.BS; |
112 | PointeeStorage.BS.Pointee = P.PointeeStorage.BS.Pointee; |
113 | |
114 | if (PointeeStorage.BS.Pointee) |
115 | PointeeStorage.BS.Pointee->addPointer(P: this); |
116 | } else if (P.isIntegralPointer()) { |
117 | PointeeStorage.Int = P.PointeeStorage.Int; |
118 | } else { |
119 | assert(false && "Unhandled storage kind" ); |
120 | } |
121 | } |
122 | |
123 | APValue Pointer::toAPValue(const ASTContext &ASTCtx) const { |
124 | llvm::SmallVector<APValue::LValuePathEntry, 5> Path; |
125 | |
126 | if (isZero()) |
127 | return APValue(static_cast<const Expr *>(nullptr), CharUnits::Zero(), Path, |
128 | /*IsOnePastEnd=*/false, /*IsNullPtr=*/true); |
129 | if (isIntegralPointer()) |
130 | return APValue(static_cast<const Expr *>(nullptr), |
131 | CharUnits::fromQuantity(Quantity: asIntPointer().Value + this->Offset), |
132 | Path, |
133 | /*IsOnePastEnd=*/false, /*IsNullPtr=*/false); |
134 | |
135 | // Build the lvalue base from the block. |
136 | const Descriptor *Desc = getDeclDesc(); |
137 | APValue::LValueBase Base; |
138 | if (const auto *VD = Desc->asValueDecl()) |
139 | Base = VD; |
140 | else if (const auto *E = Desc->asExpr()) |
141 | Base = E; |
142 | else |
143 | llvm_unreachable("Invalid allocation type" ); |
144 | |
145 | if (isUnknownSizeArray() || Desc->asExpr()) |
146 | return APValue(Base, CharUnits::Zero(), Path, |
147 | /*IsOnePastEnd=*/isOnePastEnd(), /*IsNullPtr=*/false); |
148 | |
149 | CharUnits Offset = CharUnits::Zero(); |
150 | |
151 | auto getFieldOffset = [&](const FieldDecl *FD) -> CharUnits { |
152 | // This shouldn't happen, but if it does, don't crash inside |
153 | // getASTRecordLayout. |
154 | if (FD->getParent()->isInvalidDecl()) |
155 | return CharUnits::Zero(); |
156 | const ASTRecordLayout &Layout = ASTCtx.getASTRecordLayout(D: FD->getParent()); |
157 | unsigned FieldIndex = FD->getFieldIndex(); |
158 | return ASTCtx.toCharUnitsFromBits(BitSize: Layout.getFieldOffset(FieldNo: FieldIndex)); |
159 | }; |
160 | |
161 | // Build the path into the object. |
162 | Pointer Ptr = *this; |
163 | while (Ptr.isField() || Ptr.isArrayElement()) { |
164 | if (Ptr.isArrayRoot()) { |
165 | Path.push_back(Elt: APValue::LValuePathEntry( |
166 | {Ptr.getFieldDesc()->asDecl(), /*IsVirtual=*/false})); |
167 | |
168 | if (const auto *FD = dyn_cast<FieldDecl>(Val: Ptr.getFieldDesc()->asDecl())) |
169 | Offset += getFieldOffset(FD); |
170 | |
171 | Ptr = Ptr.getBase(); |
172 | } else if (Ptr.isArrayElement()) { |
173 | unsigned Index; |
174 | if (Ptr.isOnePastEnd()) |
175 | Index = Ptr.getArray().getNumElems(); |
176 | else |
177 | Index = Ptr.getIndex(); |
178 | |
179 | Offset += (Index * ASTCtx.getTypeSizeInChars(T: Ptr.getType())); |
180 | Path.push_back(Elt: APValue::LValuePathEntry::ArrayIndex(Index)); |
181 | Ptr = Ptr.getArray(); |
182 | } else { |
183 | bool IsVirtual = false; |
184 | |
185 | // Create a path entry for the field. |
186 | const Descriptor *Desc = Ptr.getFieldDesc(); |
187 | if (const auto *BaseOrMember = Desc->asDecl()) { |
188 | if (const auto *FD = dyn_cast<FieldDecl>(Val: BaseOrMember)) { |
189 | Ptr = Ptr.getBase(); |
190 | Offset += getFieldOffset(FD); |
191 | } else if (const auto *RD = dyn_cast<CXXRecordDecl>(Val: BaseOrMember)) { |
192 | IsVirtual = Ptr.isVirtualBaseClass(); |
193 | Ptr = Ptr.getBase(); |
194 | const Record *BaseRecord = Ptr.getRecord(); |
195 | |
196 | const ASTRecordLayout &Layout = ASTCtx.getASTRecordLayout( |
197 | D: cast<CXXRecordDecl>(Val: BaseRecord->getDecl())); |
198 | if (IsVirtual) |
199 | Offset += Layout.getVBaseClassOffset(VBase: RD); |
200 | else |
201 | Offset += Layout.getBaseClassOffset(Base: RD); |
202 | |
203 | } else { |
204 | Ptr = Ptr.getBase(); |
205 | } |
206 | Path.push_back(Elt: APValue::LValuePathEntry({BaseOrMember, IsVirtual})); |
207 | continue; |
208 | } |
209 | llvm_unreachable("Invalid field type" ); |
210 | } |
211 | } |
212 | |
213 | // FIXME(perf): We compute the lvalue path above, but we can't supply it |
214 | // for dummy pointers (that causes crashes later in CheckConstantExpression). |
215 | if (isDummy()) |
216 | Path.clear(); |
217 | |
218 | // We assemble the LValuePath starting from the innermost pointer to the |
219 | // outermost one. SO in a.b.c, the first element in Path will refer to |
220 | // the field 'c', while later code expects it to refer to 'a'. |
221 | // Just invert the order of the elements. |
222 | std::reverse(first: Path.begin(), last: Path.end()); |
223 | |
224 | return APValue(Base, Offset, Path, /*IsOnePastEnd=*/isOnePastEnd(), |
225 | /*IsNullPtr=*/false); |
226 | } |
227 | |
228 | void Pointer::print(llvm::raw_ostream &OS) const { |
229 | OS << PointeeStorage.BS.Pointee << " (" ; |
230 | if (isBlockPointer()) { |
231 | const Block *B = PointeeStorage.BS.Pointee; |
232 | OS << "Block) {" ; |
233 | |
234 | if (isRoot()) |
235 | OS << "rootptr(" << PointeeStorage.BS.Base << "), " ; |
236 | else |
237 | OS << PointeeStorage.BS.Base << ", " ; |
238 | |
239 | if (isElementPastEnd()) |
240 | OS << "pastend, " ; |
241 | else |
242 | OS << Offset << ", " ; |
243 | |
244 | if (B) |
245 | OS << B->getSize(); |
246 | else |
247 | OS << "nullptr" ; |
248 | } else { |
249 | OS << "Int) {" ; |
250 | OS << PointeeStorage.Int.Value << ", " << PointeeStorage.Int.Desc; |
251 | } |
252 | OS << "}" ; |
253 | } |
254 | |
255 | std::string Pointer::toDiagnosticString(const ASTContext &Ctx) const { |
256 | if (isZero()) |
257 | return "nullptr" ; |
258 | |
259 | if (isIntegralPointer()) |
260 | return (Twine("&(" ) + Twine(asIntPointer().Value + Offset) + ")" ).str(); |
261 | |
262 | return toAPValue(ASTCtx: Ctx).getAsString(Ctx, Ty: getType()); |
263 | } |
264 | |
265 | bool Pointer::isInitialized() const { |
266 | if (isIntegralPointer()) |
267 | return true; |
268 | |
269 | if (isRoot() && PointeeStorage.BS.Base == sizeof(GlobalInlineDescriptor)) { |
270 | const GlobalInlineDescriptor &GD = |
271 | *reinterpret_cast<const GlobalInlineDescriptor *>(block()->rawData()); |
272 | return GD.InitState == GlobalInitState::Initialized; |
273 | } |
274 | |
275 | assert(PointeeStorage.BS.Pointee && |
276 | "Cannot check if null pointer was initialized" ); |
277 | const Descriptor *Desc = getFieldDesc(); |
278 | assert(Desc); |
279 | if (Desc->isPrimitiveArray()) { |
280 | if (isStatic() && PointeeStorage.BS.Base == 0) |
281 | return true; |
282 | |
283 | InitMapPtr &IM = getInitMap(); |
284 | |
285 | if (!IM) |
286 | return false; |
287 | |
288 | if (IM->first) |
289 | return true; |
290 | |
291 | return IM->second->isElementInitialized(I: getIndex()); |
292 | } |
293 | |
294 | if (asBlockPointer().Base == 0) |
295 | return true; |
296 | |
297 | // Field has its bit in an inline descriptor. |
298 | return getInlineDesc()->IsInitialized; |
299 | } |
300 | |
301 | void Pointer::initialize() const { |
302 | if (isIntegralPointer()) |
303 | return; |
304 | |
305 | assert(PointeeStorage.BS.Pointee && "Cannot initialize null pointer" ); |
306 | const Descriptor *Desc = getFieldDesc(); |
307 | |
308 | if (isRoot() && PointeeStorage.BS.Base == sizeof(GlobalInlineDescriptor)) { |
309 | GlobalInlineDescriptor &GD = *reinterpret_cast<GlobalInlineDescriptor *>( |
310 | asBlockPointer().Pointee->rawData()); |
311 | GD.InitState = GlobalInitState::Initialized; |
312 | return; |
313 | } |
314 | |
315 | assert(Desc); |
316 | if (Desc->isPrimitiveArray()) { |
317 | // Primitive global arrays don't have an initmap. |
318 | if (isStatic() && PointeeStorage.BS.Base == 0) |
319 | return; |
320 | |
321 | // Nothing to do for these. |
322 | if (Desc->getNumElems() == 0) |
323 | return; |
324 | |
325 | InitMapPtr &IM = getInitMap(); |
326 | if (!IM) |
327 | IM = |
328 | std::make_pair(x: false, y: std::make_shared<InitMap>(args: Desc->getNumElems())); |
329 | |
330 | assert(IM); |
331 | |
332 | // All initialized. |
333 | if (IM->first) |
334 | return; |
335 | |
336 | if (IM->second->initializeElement(I: getIndex())) { |
337 | IM->first = true; |
338 | IM->second.reset(); |
339 | } |
340 | return; |
341 | } |
342 | |
343 | // Field has its bit in an inline descriptor. |
344 | assert(PointeeStorage.BS.Base != 0 && |
345 | "Only composite fields can be initialised" ); |
346 | getInlineDesc()->IsInitialized = true; |
347 | } |
348 | |
/// Mark this field as the active member of its enclosing union.
void Pointer::activate() const {
  // Field has its bit in an inline descriptor.
  assert(PointeeStorage.BS.Base != 0 &&
         "Only composite fields can be initialised");

  // Whole-global pointers have no inline descriptor to flag; nothing to do.
  if (isRoot() && PointeeStorage.BS.Base == sizeof(GlobalInlineDescriptor))
    return;

  getInlineDesc()->IsActive = true;
}
359 | |
/// Deactivate this field (union member switching). Currently a no-op.
void Pointer::deactivate() const {
  // TODO: this only appears in constructors, so nothing to deactivate.
}
363 | |
364 | bool Pointer::hasSameBase(const Pointer &A, const Pointer &B) { |
365 | // Two null pointers always have the same base. |
366 | if (A.isZero() && B.isZero()) |
367 | return true; |
368 | |
369 | if (A.isIntegralPointer() && B.isIntegralPointer()) |
370 | return true; |
371 | |
372 | if (A.isIntegralPointer() || B.isIntegralPointer()) |
373 | return A.getSource() == B.getSource(); |
374 | |
375 | return A.asBlockPointer().Pointee == B.asBlockPointer().Pointee; |
376 | } |
377 | |
378 | bool Pointer::hasSameArray(const Pointer &A, const Pointer &B) { |
379 | return hasSameBase(A, B) && |
380 | A.PointeeStorage.BS.Base == B.PointeeStorage.BS.Base && |
381 | A.getFieldDesc()->IsArray; |
382 | } |
383 | |
384 | std::optional<APValue> Pointer::toRValue(const Context &Ctx, |
385 | QualType ResultType) const { |
386 | const ASTContext &ASTCtx = Ctx.getASTContext(); |
387 | assert(!ResultType.isNull()); |
388 | // Method to recursively traverse composites. |
389 | std::function<bool(QualType, const Pointer &, APValue &)> Composite; |
390 | Composite = [&Composite, &Ctx, &ASTCtx](QualType Ty, const Pointer &Ptr, |
391 | APValue &R) { |
392 | if (const auto *AT = Ty->getAs<AtomicType>()) |
393 | Ty = AT->getValueType(); |
394 | |
395 | // Invalid pointers. |
396 | if (Ptr.isDummy() || !Ptr.isLive() || !Ptr.isBlockPointer() || |
397 | Ptr.isPastEnd()) |
398 | return false; |
399 | |
400 | // Primitive values. |
401 | if (std::optional<PrimType> T = Ctx.classify(T: Ty)) { |
402 | TYPE_SWITCH(*T, R = Ptr.deref<T>().toAPValue(ASTCtx)); |
403 | return true; |
404 | } |
405 | |
406 | if (const auto *RT = Ty->getAs<RecordType>()) { |
407 | const auto *Record = Ptr.getRecord(); |
408 | assert(Record && "Missing record descriptor" ); |
409 | |
410 | bool Ok = true; |
411 | if (RT->getDecl()->isUnion()) { |
412 | const FieldDecl *ActiveField = nullptr; |
413 | APValue Value; |
414 | for (const auto &F : Record->fields()) { |
415 | const Pointer &FP = Ptr.atField(Off: F.Offset); |
416 | QualType FieldTy = F.Decl->getType(); |
417 | if (FP.isActive()) { |
418 | if (std::optional<PrimType> T = Ctx.classify(T: FieldTy)) { |
419 | TYPE_SWITCH(*T, Value = FP.deref<T>().toAPValue(ASTCtx)); |
420 | } else { |
421 | Ok &= Composite(FieldTy, FP, Value); |
422 | } |
423 | ActiveField = FP.getFieldDesc()->asFieldDecl(); |
424 | break; |
425 | } |
426 | } |
427 | R = APValue(ActiveField, Value); |
428 | } else { |
429 | unsigned NF = Record->getNumFields(); |
430 | unsigned NB = Record->getNumBases(); |
431 | unsigned NV = Ptr.isBaseClass() ? 0 : Record->getNumVirtualBases(); |
432 | |
433 | R = APValue(APValue::UninitStruct(), NB, NF); |
434 | |
435 | for (unsigned I = 0; I < NF; ++I) { |
436 | const Record::Field *FD = Record->getField(I); |
437 | QualType FieldTy = FD->Decl->getType(); |
438 | const Pointer &FP = Ptr.atField(Off: FD->Offset); |
439 | APValue &Value = R.getStructField(i: I); |
440 | |
441 | if (std::optional<PrimType> T = Ctx.classify(T: FieldTy)) { |
442 | TYPE_SWITCH(*T, Value = FP.deref<T>().toAPValue(ASTCtx)); |
443 | } else { |
444 | Ok &= Composite(FieldTy, FP, Value); |
445 | } |
446 | } |
447 | |
448 | for (unsigned I = 0; I < NB; ++I) { |
449 | const Record::Base *BD = Record->getBase(I); |
450 | QualType BaseTy = Ctx.getASTContext().getRecordType(Decl: BD->Decl); |
451 | const Pointer &BP = Ptr.atField(Off: BD->Offset); |
452 | Ok &= Composite(BaseTy, BP, R.getStructBase(i: I)); |
453 | } |
454 | |
455 | for (unsigned I = 0; I < NV; ++I) { |
456 | const Record::Base *VD = Record->getVirtualBase(I); |
457 | QualType VirtBaseTy = Ctx.getASTContext().getRecordType(Decl: VD->Decl); |
458 | const Pointer &VP = Ptr.atField(Off: VD->Offset); |
459 | Ok &= Composite(VirtBaseTy, VP, R.getStructBase(i: NB + I)); |
460 | } |
461 | } |
462 | return Ok; |
463 | } |
464 | |
465 | if (Ty->isIncompleteArrayType()) { |
466 | R = APValue(APValue::UninitArray(), 0, 0); |
467 | return true; |
468 | } |
469 | |
470 | if (const auto *AT = Ty->getAsArrayTypeUnsafe()) { |
471 | const size_t NumElems = Ptr.getNumElems(); |
472 | QualType ElemTy = AT->getElementType(); |
473 | R = APValue(APValue::UninitArray{}, NumElems, NumElems); |
474 | |
475 | bool Ok = true; |
476 | for (unsigned I = 0; I < NumElems; ++I) { |
477 | APValue &Slot = R.getArrayInitializedElt(I); |
478 | const Pointer &EP = Ptr.atIndex(Idx: I); |
479 | if (std::optional<PrimType> T = Ctx.classify(T: ElemTy)) { |
480 | TYPE_SWITCH(*T, Slot = EP.deref<T>().toAPValue(ASTCtx)); |
481 | } else { |
482 | Ok &= Composite(ElemTy, EP.narrow(), Slot); |
483 | } |
484 | } |
485 | return Ok; |
486 | } |
487 | |
488 | // Complex types. |
489 | if (const auto *CT = Ty->getAs<ComplexType>()) { |
490 | QualType ElemTy = CT->getElementType(); |
491 | |
492 | if (ElemTy->isIntegerType()) { |
493 | std::optional<PrimType> ElemT = Ctx.classify(T: ElemTy); |
494 | assert(ElemT); |
495 | INT_TYPE_SWITCH(*ElemT, { |
496 | auto V1 = Ptr.atIndex(0).deref<T>(); |
497 | auto V2 = Ptr.atIndex(1).deref<T>(); |
498 | R = APValue(V1.toAPSInt(), V2.toAPSInt()); |
499 | return true; |
500 | }); |
501 | } else if (ElemTy->isFloatingType()) { |
502 | R = APValue(Ptr.atIndex(Idx: 0).deref<Floating>().getAPFloat(), |
503 | Ptr.atIndex(Idx: 1).deref<Floating>().getAPFloat()); |
504 | return true; |
505 | } |
506 | return false; |
507 | } |
508 | |
509 | // Vector types. |
510 | if (const auto *VT = Ty->getAs<VectorType>()) { |
511 | assert(Ptr.getFieldDesc()->isPrimitiveArray()); |
512 | QualType ElemTy = VT->getElementType(); |
513 | PrimType ElemT = *Ctx.classify(T: ElemTy); |
514 | |
515 | SmallVector<APValue> Values; |
516 | Values.reserve(N: VT->getNumElements()); |
517 | for (unsigned I = 0; I != VT->getNumElements(); ++I) { |
518 | TYPE_SWITCH(ElemT, { |
519 | Values.push_back(Ptr.atIndex(I).deref<T>().toAPValue(ASTCtx)); |
520 | }); |
521 | } |
522 | |
523 | assert(Values.size() == VT->getNumElements()); |
524 | R = APValue(Values.data(), Values.size()); |
525 | return true; |
526 | } |
527 | |
528 | llvm_unreachable("invalid value to return" ); |
529 | }; |
530 | |
531 | // Invalid to read from. |
532 | if (isDummy() || !isLive() || isPastEnd()) |
533 | return std::nullopt; |
534 | |
535 | // We can return these as rvalues, but we can't deref() them. |
536 | if (isZero() || isIntegralPointer()) |
537 | return toAPValue(ASTCtx); |
538 | |
539 | // Just load primitive types. |
540 | if (std::optional<PrimType> T = Ctx.classify(T: ResultType)) { |
541 | TYPE_SWITCH(*T, return this->deref<T>().toAPValue(ASTCtx)); |
542 | } |
543 | |
544 | // Return the composite type. |
545 | APValue Result; |
546 | if (!Composite(getType(), *this, Result)) |
547 | return std::nullopt; |
548 | return Result; |
549 | } |
550 | |