1//===--- Pointer.cpp - Types for the constexpr VM ---------------*- C++ -*-===//
2//
3// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
4// See https://llvm.org/LICENSE.txt for license information.
5// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
6//
7//===----------------------------------------------------------------------===//
8
9#include "Pointer.h"
10#include "Boolean.h"
11#include "Context.h"
12#include "Floating.h"
13#include "Function.h"
14#include "InitMap.h"
15#include "Integral.h"
16#include "InterpBlock.h"
17#include "MemberPointer.h"
18#include "PrimType.h"
19#include "Record.h"
20#include "clang/AST/Expr.h"
21#include "clang/AST/ExprCXX.h"
22#include "clang/AST/RecordLayout.h"
23
24using namespace clang;
25using namespace clang::interp;
26
/// Create a pointer to the start of a block's data, i.e. directly past the
/// block's metadata. Both the base and the offset are set to the metadata
/// size, so the pointer designates the root of the allocation.
Pointer::Pointer(Block *Pointee)
    : Pointer(Pointee, Pointee->getDescriptor()->getMetadataSize(),
              Pointee->getDescriptor()->getMetadataSize()) {}
30
/// Create a pointer whose base and offset are the same value, i.e. a pointer
/// that designates its base object directly.
Pointer::Pointer(Block *Pointee, uint64_t BaseAndOffset)
    : Pointer(Pointee, BaseAndOffset, BaseAndOffset) {}
33
34Pointer::Pointer(Block *Pointee, unsigned Base, uint64_t Offset)
35 : Offset(Offset), StorageKind(Storage::Block) {
36 assert(Pointee);
37 assert((Base == RootPtrMark || Base % alignof(void *) == 0) && "wrong base");
38 assert(Base >= Pointee->getDescriptor()->getMetadataSize());
39
40 BS = {.Pointee: Pointee, .Base: Base, .Prev: nullptr, .Next: nullptr};
41 Pointee->addPointer(P: this);
42}
43
44Pointer::Pointer(const Pointer &P)
45 : Offset(P.Offset), StorageKind(P.StorageKind) {
46 switch (StorageKind) {
47 case Storage::Int:
48 Int = P.Int;
49 break;
50 case Storage::Block:
51 BS = P.BS;
52 if (BS.Pointee)
53 BS.Pointee->addPointer(P: this);
54 break;
55 case Storage::Fn:
56 Fn = P.Fn;
57 break;
58 case Storage::Typeid:
59 Typeid = P.Typeid;
60 break;
61 }
62}
63
64Pointer::Pointer(Pointer &&P) : Offset(P.Offset), StorageKind(P.StorageKind) {
65 switch (StorageKind) {
66 case Storage::Int:
67 Int = P.Int;
68 break;
69 case Storage::Block:
70 BS = P.BS;
71 if (BS.Pointee)
72 BS.Pointee->replacePointer(Old: &P, New: this);
73 break;
74 case Storage::Fn:
75 Fn = P.Fn;
76 break;
77 case Storage::Typeid:
78 Typeid = P.Typeid;
79 break;
80 }
81}
82
83Pointer::~Pointer() {
84 if (!isBlockPointer())
85 return;
86
87 if (Block *Pointee = BS.Pointee) {
88 Pointee->removePointer(P: this);
89 BS.Pointee = nullptr;
90 Pointee->cleanup();
91 }
92}
93
94Pointer &Pointer::operator=(const Pointer &P) {
95 // If the current storage type is Block, we need to remove
96 // this pointer from the block.
97 if (isBlockPointer()) {
98 if (P.isBlockPointer() && this->block() == P.block()) {
99 Offset = P.Offset;
100 BS.Base = P.BS.Base;
101 return *this;
102 }
103
104 if (Block *Pointee = BS.Pointee) {
105 Pointee->removePointer(P: this);
106 BS.Pointee = nullptr;
107 Pointee->cleanup();
108 }
109 }
110
111 StorageKind = P.StorageKind;
112 Offset = P.Offset;
113
114 switch (StorageKind) {
115 case Storage::Int:
116 Int = P.Int;
117 break;
118 case Storage::Block:
119 BS = P.BS;
120
121 if (BS.Pointee)
122 BS.Pointee->addPointer(P: this);
123 break;
124 case Storage::Fn:
125 Fn = P.Fn;
126 break;
127 case Storage::Typeid:
128 Typeid = P.Typeid;
129 }
130 return *this;
131}
132
133Pointer &Pointer::operator=(Pointer &&P) {
134 // If the current storage type is Block, we need to remove
135 // this pointer from the block.
136 if (isBlockPointer()) {
137 if (P.isBlockPointer() && this->block() == P.block()) {
138 Offset = P.Offset;
139 BS.Base = P.BS.Base;
140 return *this;
141 }
142
143 if (Block *Pointee = BS.Pointee) {
144 Pointee->removePointer(P: this);
145 BS.Pointee = nullptr;
146 Pointee->cleanup();
147 }
148 }
149
150 StorageKind = P.StorageKind;
151 Offset = P.Offset;
152
153 switch (StorageKind) {
154 case Storage::Int:
155 Int = P.Int;
156 break;
157 case Storage::Block:
158 BS = P.BS;
159
160 if (BS.Pointee)
161 BS.Pointee->addPointer(P: this);
162 break;
163 case Storage::Fn:
164 Fn = P.Fn;
165 break;
166 case Storage::Typeid:
167 Typeid = P.Typeid;
168 }
169 return *this;
170}
171
172APValue Pointer::toAPValue(const ASTContext &ASTCtx) const {
173 llvm::SmallVector<APValue::LValuePathEntry, 5> Path;
174
175 if (isZero())
176 return APValue(static_cast<const Expr *>(nullptr), CharUnits::Zero(), Path,
177 /*IsOnePastEnd=*/false, /*IsNullPtr=*/true);
178 if (isIntegralPointer())
179 return APValue(static_cast<const Expr *>(nullptr),
180 CharUnits::fromQuantity(Quantity: asIntPointer().Value + this->Offset),
181 Path,
182 /*IsOnePastEnd=*/false, /*IsNullPtr=*/false);
183 if (isFunctionPointer()) {
184 const FunctionPointer &FP = asFunctionPointer();
185 if (const FunctionDecl *FD = FP.getFunction()->getDecl())
186 return APValue(FD, CharUnits::fromQuantity(Quantity: Offset), {},
187 /*OnePastTheEnd=*/false, /*IsNull=*/false);
188 return APValue(FP.getFunction()->getExpr(), CharUnits::fromQuantity(Quantity: Offset),
189 {},
190 /*OnePastTheEnd=*/false, /*IsNull=*/false);
191 }
192
193 if (isTypeidPointer()) {
194 TypeInfoLValue TypeInfo(Typeid.TypePtr);
195 return APValue(APValue::LValueBase::getTypeInfo(
196 LV: TypeInfo, TypeInfo: QualType(Typeid.TypeInfoType, 0)),
197 CharUnits::Zero(), {},
198 /*OnePastTheEnd=*/false, /*IsNull=*/false);
199 }
200
201 // Build the lvalue base from the block.
202 const Descriptor *Desc = getDeclDesc();
203 APValue::LValueBase Base;
204 if (const auto *VD = Desc->asValueDecl())
205 Base = VD;
206 else if (const auto *E = Desc->asExpr()) {
207 if (block()->isDynamic()) {
208 QualType AllocatedType = getDeclPtr().getFieldDesc()->getDataType(Ctx: ASTCtx);
209 DynamicAllocLValue DA(*block()->DynAllocId);
210 Base = APValue::LValueBase::getDynamicAlloc(LV: DA, Type: AllocatedType);
211 } else {
212 Base = E;
213 }
214 } else
215 llvm_unreachable("Invalid allocation type");
216
217 if (isUnknownSizeArray())
218 return APValue(Base, CharUnits::Zero(), Path,
219 /*IsOnePastEnd=*/isOnePastEnd(), /*IsNullPtr=*/false);
220
221 CharUnits Offset = CharUnits::Zero();
222
223 auto getFieldOffset = [&](const FieldDecl *FD) -> CharUnits {
224 // This shouldn't happen, but if it does, don't crash inside
225 // getASTRecordLayout.
226 if (FD->getParent()->isInvalidDecl())
227 return CharUnits::Zero();
228 const ASTRecordLayout &Layout = ASTCtx.getASTRecordLayout(D: FD->getParent());
229 unsigned FieldIndex = FD->getFieldIndex();
230 return ASTCtx.toCharUnitsFromBits(BitSize: Layout.getFieldOffset(FieldNo: FieldIndex));
231 };
232
233 bool UsePath = true;
234 if (const ValueDecl *VD = getDeclDesc()->asValueDecl();
235 VD && VD->getType()->isReferenceType())
236 UsePath = false;
237
238 // Build the path into the object.
239 bool OnePastEnd = isOnePastEnd() && !isZeroSizeArray();
240 Pointer Ptr = *this;
241 while (Ptr.isField() || Ptr.isArrayElement()) {
242
243 if (Ptr.isArrayRoot()) {
244 // An array root may still be an array element itself.
245 if (Ptr.isArrayElement()) {
246 Ptr = Ptr.expand();
247 const Descriptor *Desc = Ptr.getFieldDesc();
248 unsigned Index = Ptr.getIndex();
249 QualType ElemType = Desc->getElemQualType();
250 Offset += (Index * ASTCtx.getTypeSizeInChars(T: ElemType));
251 if (Ptr.getArray().getFieldDesc()->IsArray)
252 Path.push_back(Elt: APValue::LValuePathEntry::ArrayIndex(Index));
253 Ptr = Ptr.getArray();
254 } else {
255 const Descriptor *Desc = Ptr.getFieldDesc();
256 const auto *Dcl = Desc->asDecl();
257 Path.push_back(Elt: APValue::LValuePathEntry({Dcl, /*IsVirtual=*/false}));
258
259 if (const auto *FD = dyn_cast_if_present<FieldDecl>(Val: Dcl))
260 Offset += getFieldOffset(FD);
261
262 Ptr = Ptr.getBase();
263 }
264 } else if (Ptr.isArrayElement()) {
265 Ptr = Ptr.expand();
266 const Descriptor *Desc = Ptr.getFieldDesc();
267 unsigned Index;
268 if (Ptr.isOnePastEnd()) {
269 Index = Ptr.getArray().getNumElems();
270 OnePastEnd = false;
271 } else
272 Index = Ptr.getIndex();
273
274 QualType ElemType = Desc->getElemQualType();
275 if (const auto *RD = ElemType->getAsRecordDecl();
276 RD && !RD->getDefinition()) {
277 // Ignore this for the offset.
278 } else {
279 Offset += (Index * ASTCtx.getTypeSizeInChars(T: ElemType));
280 }
281 if (Ptr.getArray().getFieldDesc()->IsArray)
282 Path.push_back(Elt: APValue::LValuePathEntry::ArrayIndex(Index));
283 Ptr = Ptr.getArray();
284 } else {
285 const Descriptor *Desc = Ptr.getFieldDesc();
286
287 // Create a path entry for the field.
288 if (const auto *BaseOrMember = Desc->asDecl()) {
289 bool IsVirtual = false;
290 if (const auto *FD = dyn_cast<FieldDecl>(Val: BaseOrMember)) {
291 Ptr = Ptr.getBase();
292 Offset += getFieldOffset(FD);
293 } else if (const auto *RD = dyn_cast<CXXRecordDecl>(Val: BaseOrMember)) {
294 IsVirtual = Ptr.isVirtualBaseClass();
295 Ptr = Ptr.getBase();
296 const Record *BaseRecord = Ptr.getRecord();
297
298 const ASTRecordLayout &Layout = ASTCtx.getASTRecordLayout(
299 D: cast<CXXRecordDecl>(Val: BaseRecord->getDecl()));
300 if (IsVirtual)
301 Offset += Layout.getVBaseClassOffset(VBase: RD);
302 else
303 Offset += Layout.getBaseClassOffset(Base: RD);
304
305 } else {
306 Ptr = Ptr.getBase();
307 }
308 Path.push_back(Elt: APValue::LValuePathEntry({BaseOrMember, IsVirtual}));
309 continue;
310 }
311 llvm_unreachable("Invalid field type");
312 }
313 }
314
315 // We assemble the LValuePath starting from the innermost pointer to the
316 // outermost one. SO in a.b.c, the first element in Path will refer to
317 // the field 'c', while later code expects it to refer to 'a'.
318 // Just invert the order of the elements.
319 std::reverse(first: Path.begin(), last: Path.end());
320
321 if (UsePath)
322 return APValue(Base, Offset, Path, OnePastEnd);
323
324 return APValue(Base, Offset, APValue::NoLValuePath());
325}
326
327void Pointer::print(llvm::raw_ostream &OS) const {
328 switch (StorageKind) {
329 case Storage::Block: {
330 const Block *B = BS.Pointee;
331 OS << "(Block) " << B << " {";
332
333 if (isRoot())
334 OS << "rootptr(" << BS.Base << "), ";
335 else
336 OS << BS.Base << ", ";
337
338 if (isElementPastEnd())
339 OS << "pastend, ";
340 else
341 OS << Offset << ", ";
342
343 if (B)
344 OS << B->getSize();
345 else
346 OS << "nullptr";
347 OS << "}";
348 } break;
349 case Storage::Int:
350 OS << "(Int) {";
351 OS << Int.Value << " + " << Offset << ", " << Int.Desc;
352 OS << "}";
353 break;
354 case Storage::Fn:
355 OS << "(Fn) { " << asFunctionPointer().getFunction() << " + " << Offset
356 << " }";
357 break;
358 case Storage::Typeid:
359 OS << "(Typeid) { " << (const void *)asTypeidPointer().TypePtr << ", "
360 << (const void *)asTypeidPointer().TypeInfoType << " + " << Offset
361 << "}";
362 }
363}
364
365/// Compute an offset that can be used to compare the pointer to another one
366/// with the same base. To get accurate results, we basically _have to_ compute
367/// the lvalue offset using the ASTRecordLayout.
368///
369/// FIXME: We're still mixing values from the record layout with our internal
370/// offsets, which will inevitably lead to cryptic errors.
371size_t Pointer::computeOffsetForComparison(const ASTContext &ASTCtx) const {
372 switch (StorageKind) {
373 case Storage::Int:
374 return Int.Value + Offset;
375 case Storage::Block:
376 // See below.
377 break;
378 case Storage::Fn:
379 return Fn.getIntegerRepresentation() + Offset;
380 case Storage::Typeid:
381 return reinterpret_cast<uintptr_t>(asTypeidPointer().TypePtr) + Offset;
382 }
383
384 size_t Result = 0;
385 Pointer P = *this;
386 while (true) {
387 if (P.isVirtualBaseClass()) {
388 Result += getInlineDesc()->Offset;
389 P = P.getBase();
390 continue;
391 }
392
393 if (P.isBaseClass()) {
394 if (P.getRecord()->getNumVirtualBases() > 0)
395 Result += P.getInlineDesc()->Offset;
396 P = P.getBase();
397 continue;
398 }
399 if (P.isArrayElement()) {
400 P = P.expand();
401 Result += (P.getIndex() * P.elemSize());
402 P = P.getArray();
403 continue;
404 }
405
406 if (P.isRoot()) {
407 if (P.isOnePastEnd())
408 Result +=
409 ASTCtx.getTypeSizeInChars(T: P.getDeclDesc()->getType()).getQuantity();
410 break;
411 }
412
413 assert(P.getField());
414 const Record *R = P.getBase().getRecord();
415 assert(R);
416
417 const ASTRecordLayout &Layout = ASTCtx.getASTRecordLayout(D: R->getDecl());
418 Result += ASTCtx
419 .toCharUnitsFromBits(
420 BitSize: Layout.getFieldOffset(FieldNo: P.getField()->getFieldIndex()))
421 .getQuantity();
422
423 if (P.isOnePastEnd())
424 Result +=
425 ASTCtx.getTypeSizeInChars(T: P.getField()->getType()).getQuantity();
426
427 P = P.getBase();
428 if (P.isRoot())
429 break;
430 }
431 return Result;
432}
433
434std::string Pointer::toDiagnosticString(const ASTContext &Ctx) const {
435 if (isZero())
436 return "nullptr";
437
438 if (isIntegralPointer())
439 return (Twine("&(") + Twine(asIntPointer().Value + Offset) + ")").str();
440
441 if (isFunctionPointer())
442 return asFunctionPointer().toDiagnosticString(Ctx);
443
444 return toAPValue(ASTCtx: Ctx).getAsString(Ctx, Ty: getType());
445}
446
447bool Pointer::isInitialized() const {
448 if (!isBlockPointer())
449 return true;
450
451 if (isRoot() && BS.Base == sizeof(GlobalInlineDescriptor) &&
452 Offset == BS.Base) {
453 const auto &GD = block()->getBlockDesc<GlobalInlineDescriptor>();
454 return GD.InitState == GlobalInitState::Initialized;
455 }
456
457 assert(BS.Pointee && "Cannot check if null pointer was initialized");
458 const Descriptor *Desc = getFieldDesc();
459 assert(Desc);
460 if (Desc->isPrimitiveArray())
461 return isElementInitialized(Index: getIndex());
462
463 if (asBlockPointer().Base == 0)
464 return true;
465 // Field has its bit in an inline descriptor.
466 return getInlineDesc()->IsInitialized;
467}
468
469bool Pointer::isElementInitialized(unsigned Index) const {
470 if (!isBlockPointer())
471 return true;
472
473 const Descriptor *Desc = getFieldDesc();
474 assert(Desc);
475
476 if (isStatic() && BS.Base == 0)
477 return true;
478
479 if (isRoot() && BS.Base == sizeof(GlobalInlineDescriptor) &&
480 Offset == BS.Base) {
481 const auto &GD = block()->getBlockDesc<GlobalInlineDescriptor>();
482 return GD.InitState == GlobalInitState::Initialized;
483 }
484
485 if (Desc->isPrimitiveArray()) {
486 InitMapPtr IM = getInitMap();
487
488 if (IM.allInitialized())
489 return true;
490
491 if (!IM.hasInitMap())
492 return false;
493 return IM->isElementInitialized(I: Index);
494 }
495 return isInitialized();
496}
497
498bool Pointer::isElementAlive(unsigned Index) const {
499 assert(getFieldDesc()->isPrimitiveArray());
500
501 InitMapPtr &IM = getInitMap();
502 if (!IM.hasInitMap())
503 return true;
504
505 if (IM.allInitialized())
506 return true;
507
508 return IM->isElementAlive(I: Index);
509}
510
511void Pointer::startLifetime() const {
512 if (!isBlockPointer())
513 return;
514 if (BS.Base < sizeof(InlineDescriptor))
515 return;
516
517 if (inArray()) {
518 const Descriptor *Desc = getFieldDesc();
519 InitMapPtr &IM = getInitMap();
520 if (!IM.hasInitMap())
521 IM.setInitMap(new InitMap(Desc->getNumElems(), IM.allInitialized()));
522
523 IM->startElementLifetime(I: getIndex());
524 assert(isArrayRoot() || (this->getLifetime() == Lifetime::Started));
525 return;
526 }
527
528 getInlineDesc()->LifeState = Lifetime::Started;
529}
530
531void Pointer::endLifetime() const {
532 if (!isBlockPointer())
533 return;
534 if (BS.Base < sizeof(InlineDescriptor))
535 return;
536
537 if (inArray()) {
538 const Descriptor *Desc = getFieldDesc();
539 InitMapPtr &IM = getInitMap();
540 if (!IM.hasInitMap())
541 IM.setInitMap(new InitMap(Desc->getNumElems(), IM.allInitialized()));
542
543 IM->endElementLifetime(I: getIndex());
544 assert(isArrayRoot() || (this->getLifetime() == Lifetime::Ended));
545 return;
546 }
547
548 getInlineDesc()->LifeState = Lifetime::Ended;
549}
550
551void Pointer::initialize() const {
552 if (!isBlockPointer())
553 return;
554
555 assert(BS.Pointee && "Cannot initialize null pointer");
556
557 if (isRoot() && BS.Base == sizeof(GlobalInlineDescriptor) &&
558 Offset == BS.Base) {
559 auto &GD = BS.Pointee->getBlockDesc<GlobalInlineDescriptor>();
560 GD.InitState = GlobalInitState::Initialized;
561 return;
562 }
563
564 const Descriptor *Desc = getFieldDesc();
565 assert(Desc);
566 if (Desc->isPrimitiveArray()) {
567 if (Desc->getNumElems() != 0)
568 initializeElement(Index: getIndex());
569 return;
570 }
571
572 // Field has its bit in an inline descriptor.
573 assert(BS.Base != 0 && "Only composite fields can be initialised");
574 getInlineDesc()->IsInitialized = true;
575}
576
577void Pointer::initializeElement(unsigned Index) const {
578 // Primitive global arrays don't have an initmap.
579 if (isStatic() && BS.Base == 0)
580 return;
581
582 assert(Index < getFieldDesc()->getNumElems());
583
584 InitMapPtr &IM = getInitMap();
585 if (IM.allInitialized())
586 return;
587
588 if (!IM.hasInitMap()) {
589 const Descriptor *Desc = getFieldDesc();
590 IM.setInitMap(new InitMap(Desc->getNumElems()));
591 }
592 assert(IM.hasInitMap());
593
594 if (IM->initializeElement(I: Index))
595 IM.noteAllInitialized();
596}
597
/// Mark every element of a primitive array as initialized at once, without
/// touching individual init-map bits.
void Pointer::initializeAllElements() const {
  assert(getFieldDesc()->isPrimitiveArray());
  assert(isArrayRoot());

  getInitMap().noteAllInitialized();
}
604
605bool Pointer::allElementsInitialized() const {
606 assert(getFieldDesc()->isPrimitiveArray());
607 assert(isArrayRoot());
608
609 if (isStatic() && BS.Base == 0)
610 return true;
611
612 if (isRoot() && BS.Base == sizeof(GlobalInlineDescriptor) &&
613 Offset == BS.Base) {
614 const auto &GD = block()->getBlockDesc<GlobalInlineDescriptor>();
615 return GD.InitState == GlobalInitState::Initialized;
616 }
617
618 InitMapPtr IM = getInitMap();
619 return IM.allInitialized();
620}
621
622bool Pointer::allElementsAlive() const {
623 assert(getFieldDesc()->isPrimitiveArray());
624 assert(isArrayRoot());
625
626 if (isStatic() && BS.Base == 0)
627 return true;
628
629 if (isRoot() && BS.Base == sizeof(GlobalInlineDescriptor) &&
630 Offset == BS.Base) {
631 const auto &GD = block()->getBlockDesc<GlobalInlineDescriptor>();
632 return GD.InitState == GlobalInitState::Initialized;
633 }
634
635 InitMapPtr &IM = getInitMap();
636 return IM.allInitialized() || (IM.hasInitMap() && IM->allElementsAlive());
637}
638
639void Pointer::activate() const {
640 // Field has its bit in an inline descriptor.
641 assert(BS.Base != 0 && "Only composite fields can be activated");
642
643 if (isRoot() && BS.Base == sizeof(GlobalInlineDescriptor))
644 return;
645 if (!getInlineDesc()->InUnion)
646 return;
647
648 std::function<void(Pointer &)> activate;
649 activate = [&activate](Pointer &P) -> void {
650 P.getInlineDesc()->IsActive = true;
651 if (const Record *R = P.getRecord(); R && !R->isUnion()) {
652 for (const Record::Field &F : R->fields()) {
653 Pointer FieldPtr = P.atField(Off: F.Offset);
654 if (!FieldPtr.getInlineDesc()->IsActive)
655 activate(FieldPtr);
656 }
657 // FIXME: Bases?
658 }
659 };
660
661 std::function<void(Pointer &)> deactivate;
662 deactivate = [&deactivate](Pointer &P) -> void {
663 P.getInlineDesc()->IsActive = false;
664
665 if (const Record *R = P.getRecord()) {
666 for (const Record::Field &F : R->fields()) {
667 Pointer FieldPtr = P.atField(Off: F.Offset);
668 if (FieldPtr.getInlineDesc()->IsActive)
669 deactivate(FieldPtr);
670 }
671 // FIXME: Bases?
672 }
673 };
674
675 Pointer B = *this;
676 while (!B.isRoot() && B.inUnion()) {
677 activate(B);
678
679 // When walking up the pointer chain, deactivate
680 // all union child pointers that aren't on our path.
681 Pointer Cur = B;
682 B = B.getBase();
683 if (const Record *BR = B.getRecord(); BR && BR->isUnion()) {
684 for (const Record::Field &F : BR->fields()) {
685 Pointer FieldPtr = B.atField(Off: F.Offset);
686 if (FieldPtr != Cur)
687 deactivate(FieldPtr);
688 }
689 }
690 }
691}
692
/// Deactivate this field. Intentionally a no-op for now — see the TODO.
void Pointer::deactivate() const {
  // TODO: this only appears in constructors, so nothing to deactivate.
}
696
697bool Pointer::hasSameBase(const Pointer &A, const Pointer &B) {
698 // Two null pointers always have the same base.
699 if (A.isZero() && B.isZero())
700 return true;
701
702 if (A.isIntegralPointer() && B.isIntegralPointer())
703 return true;
704 if (A.isFunctionPointer() && B.isFunctionPointer())
705 return true;
706 if (A.isTypeidPointer() && B.isTypeidPointer())
707 return true;
708
709 if (A.StorageKind != B.StorageKind)
710 return false;
711
712 return A.asBlockPointer().Pointee == B.asBlockPointer().Pointee;
713}
714
715bool Pointer::pointToSameBlock(const Pointer &A, const Pointer &B) {
716 if (!A.isBlockPointer() || !B.isBlockPointer())
717 return false;
718 return A.block() == B.block();
719}
720
721bool Pointer::hasSameArray(const Pointer &A, const Pointer &B) {
722 return hasSameBase(A, B) && A.BS.Base == B.BS.Base &&
723 A.getFieldDesc()->IsArray;
724}
725
726bool Pointer::pointsToLiteral() const {
727 if (isZero() || !isBlockPointer())
728 return false;
729
730 if (block()->isDynamic())
731 return false;
732
733 const Expr *E = block()->getDescriptor()->asExpr();
734 return E && !isa<MaterializeTemporaryExpr, StringLiteral>(Val: E);
735}
736
737bool Pointer::pointsToStringLiteral() const {
738 if (isZero() || !isBlockPointer())
739 return false;
740
741 if (block()->isDynamic())
742 return false;
743
744 const Expr *E = block()->getDescriptor()->asExpr();
745 return isa_and_nonnull<StringLiteral>(Val: E);
746}
747
748std::optional<std::pair<Pointer, Pointer>>
749Pointer::computeSplitPoint(const Pointer &A, const Pointer &B) {
750 if (!A.isBlockPointer() || !B.isBlockPointer())
751 return std::nullopt;
752
753 if (A.asBlockPointer().Pointee != B.asBlockPointer().Pointee)
754 return std::nullopt;
755 if (A.isRoot() && B.isRoot())
756 return std::nullopt;
757
758 if (A == B)
759 return std::make_pair(x: A, y: B);
760
761 auto getBase = [](const Pointer &P) -> Pointer {
762 if (P.isArrayElement())
763 return P.expand().getArray();
764 return P.getBase();
765 };
766
767 Pointer IterA = A;
768 Pointer IterB = B;
769 Pointer CurA = IterA;
770 Pointer CurB = IterB;
771 for (;;) {
772 if (IterA.asBlockPointer().Base > IterB.asBlockPointer().Base) {
773 CurA = IterA;
774 IterA = getBase(IterA);
775 } else {
776 CurB = IterB;
777 IterB = getBase(IterB);
778 }
779
780 if (IterA == IterB)
781 return std::make_pair(x&: CurA, y&: CurB);
782
783 if (IterA.isRoot() && IterB.isRoot())
784 return std::nullopt;
785 }
786
787 llvm_unreachable("The loop above should've returned.");
788}
789
790std::optional<APValue> Pointer::toRValue(const Context &Ctx,
791 QualType ResultType) const {
792 const ASTContext &ASTCtx = Ctx.getASTContext();
793 assert(!ResultType.isNull());
794 // Method to recursively traverse composites.
795 std::function<bool(QualType, const Pointer &, APValue &)> Composite;
796 Composite = [&Composite, &Ctx, &ASTCtx](QualType Ty, const Pointer &Ptr,
797 APValue &R) {
798 if (const auto *AT = Ty->getAs<AtomicType>())
799 Ty = AT->getValueType();
800
801 // Invalid pointers.
802 if (Ptr.isDummy() || !Ptr.isLive() || !Ptr.isBlockPointer() ||
803 Ptr.isPastEnd())
804 return false;
805
806 // Primitive values.
807 if (OptPrimType T = Ctx.classify(T: Ty)) {
808 TYPE_SWITCH(*T, R = Ptr.deref<T>().toAPValue(ASTCtx));
809 return true;
810 }
811
812 if (const auto *RT = Ty->getAsCanonical<RecordType>()) {
813 const auto *Record = Ptr.getRecord();
814 assert(Record && "Missing record descriptor");
815
816 bool Ok = true;
817 if (RT->getDecl()->isUnion()) {
818 const FieldDecl *ActiveField = nullptr;
819 APValue Value;
820 for (const auto &F : Record->fields()) {
821 const Pointer &FP = Ptr.atField(Off: F.Offset);
822 QualType FieldTy = F.Decl->getType();
823 if (FP.isActive()) {
824 if (OptPrimType T = Ctx.classify(T: FieldTy)) {
825 TYPE_SWITCH(*T, Value = FP.deref<T>().toAPValue(ASTCtx));
826 } else {
827 Ok &= Composite(FieldTy, FP, Value);
828 }
829 ActiveField = FP.getFieldDesc()->asFieldDecl();
830 break;
831 }
832 }
833 R = APValue(ActiveField, Value);
834 } else {
835 unsigned NF = Record->getNumFields();
836 unsigned NB = Record->getNumBases();
837 unsigned NV = Ptr.isBaseClass() ? 0 : Record->getNumVirtualBases();
838
839 R = APValue(APValue::UninitStruct(), NB, NF);
840
841 for (unsigned I = 0; I < NF; ++I) {
842 const Record::Field *FD = Record->getField(I);
843 QualType FieldTy = FD->Decl->getType();
844 const Pointer &FP = Ptr.atField(Off: FD->Offset);
845 APValue &Value = R.getStructField(i: I);
846
847 if (OptPrimType T = Ctx.classify(T: FieldTy)) {
848 TYPE_SWITCH(*T, Value = FP.deref<T>().toAPValue(ASTCtx));
849 } else {
850 Ok &= Composite(FieldTy, FP, Value);
851 }
852 }
853
854 for (unsigned I = 0; I < NB; ++I) {
855 const Record::Base *BD = Record->getBase(I);
856 QualType BaseTy = Ctx.getASTContext().getCanonicalTagType(TD: BD->Decl);
857 const Pointer &BP = Ptr.atField(Off: BD->Offset);
858 Ok &= Composite(BaseTy, BP, R.getStructBase(i: I));
859 }
860
861 for (unsigned I = 0; I < NV; ++I) {
862 const Record::Base *VD = Record->getVirtualBase(I);
863 QualType VirtBaseTy =
864 Ctx.getASTContext().getCanonicalTagType(TD: VD->Decl);
865 const Pointer &VP = Ptr.atField(Off: VD->Offset);
866 Ok &= Composite(VirtBaseTy, VP, R.getStructBase(i: NB + I));
867 }
868 }
869 return Ok;
870 }
871
872 if (Ty->isIncompleteArrayType()) {
873 R = APValue(APValue::UninitArray(), 0, 0);
874 return true;
875 }
876
877 if (const auto *AT = Ty->getAsArrayTypeUnsafe()) {
878 const size_t NumElems = Ptr.getNumElems();
879 QualType ElemTy = AT->getElementType();
880 R = APValue(APValue::UninitArray{}, NumElems, NumElems);
881
882 bool Ok = true;
883 OptPrimType ElemT = Ctx.classify(T: ElemTy);
884 for (unsigned I = 0; I != NumElems; ++I) {
885 APValue &Slot = R.getArrayInitializedElt(I);
886 if (ElemT) {
887 TYPE_SWITCH(*ElemT, Slot = Ptr.elem<T>(I).toAPValue(ASTCtx));
888 } else {
889 Ok &= Composite(ElemTy, Ptr.atIndex(Idx: I).narrow(), Slot);
890 }
891 }
892 return Ok;
893 }
894
895 // Complex types.
896 if (const auto *CT = Ty->getAs<ComplexType>()) {
897 // Can happen via C casts.
898 if (!Ptr.getFieldDesc()->isPrimitiveArray())
899 return false;
900
901 QualType ElemTy = CT->getElementType();
902 if (ElemTy->isIntegerType()) {
903 OptPrimType ElemT = Ctx.classify(T: ElemTy);
904 assert(ElemT);
905 INT_TYPE_SWITCH(*ElemT, {
906 auto V1 = Ptr.elem<T>(0);
907 auto V2 = Ptr.elem<T>(1);
908 R = APValue(V1.toAPSInt(), V2.toAPSInt());
909 return true;
910 });
911 } else if (ElemTy->isFloatingType()) {
912 R = APValue(Ptr.elem<Floating>(I: 0).getAPFloat(),
913 Ptr.elem<Floating>(I: 1).getAPFloat());
914 return true;
915 }
916 return false;
917 }
918
919 // Vector types.
920 if (const auto *VT = Ty->getAs<VectorType>()) {
921 assert(Ptr.getFieldDesc()->isPrimitiveArray());
922 QualType ElemTy = VT->getElementType();
923 PrimType ElemT = *Ctx.classify(T: ElemTy);
924
925 SmallVector<APValue> Values;
926 Values.reserve(N: VT->getNumElements());
927 for (unsigned I = 0; I != VT->getNumElements(); ++I) {
928 TYPE_SWITCH(ElemT,
929 { Values.push_back(Ptr.elem<T>(I).toAPValue(ASTCtx)); });
930 }
931
932 assert(Values.size() == VT->getNumElements());
933 R = APValue(Values.data(), Values.size());
934 return true;
935 }
936
937 llvm_unreachable("invalid value to return");
938 };
939
940 // Invalid to read from.
941 if (isDummy() || !isLive() || isPastEnd())
942 return std::nullopt;
943
944 // We can return these as rvalues, but we can't deref() them.
945 if (isZero() || isIntegralPointer())
946 return toAPValue(ASTCtx);
947
948 // Just load primitive types.
949 if (OptPrimType T = Ctx.classify(T: ResultType)) {
950 TYPE_SWITCH(*T, return this->deref<T>().toAPValue(ASTCtx));
951 }
952
953 // Return the composite type.
954 APValue Result;
955 if (!Composite(ResultType, *this, Result))
956 return std::nullopt;
957 return Result;
958}
959
960std::optional<IntPointer> IntPointer::atOffset(const ASTContext &ASTCtx,
961 unsigned Offset) const {
962 if (!this->Desc)
963 return *this;
964 const Record *R = this->Desc->ElemRecord;
965 if (!R)
966 return *this;
967
968 const Record::Field *F = nullptr;
969 for (auto &It : R->fields()) {
970 if (It.Offset == Offset) {
971 F = &It;
972 break;
973 }
974 }
975 if (!F)
976 return *this;
977
978 const FieldDecl *FD = F->Decl;
979 if (FD->getParent()->isInvalidDecl())
980 return std::nullopt;
981
982 const ASTRecordLayout &Layout = ASTCtx.getASTRecordLayout(D: FD->getParent());
983 unsigned FieldIndex = FD->getFieldIndex();
984 uint64_t FieldOffset =
985 ASTCtx.toCharUnitsFromBits(BitSize: Layout.getFieldOffset(FieldNo: FieldIndex))
986 .getQuantity();
987 return IntPointer{.Desc: F->Desc, .Value: this->Value + FieldOffset};
988}
989
990IntPointer IntPointer::baseCast(const ASTContext &ASTCtx,
991 unsigned BaseOffset) const {
992 if (!Desc) {
993 assert(Value == 0);
994 return *this;
995 }
996 const Record *R = Desc->ElemRecord;
997 const Descriptor *BaseDesc = nullptr;
998
999 // This iterates over bases and checks for the proper offset. That's
1000 // potentially slow but this case really shouldn't happen a lot.
1001 for (const Record::Base &B : R->bases()) {
1002 if (B.Offset == BaseOffset) {
1003 BaseDesc = B.Desc;
1004 break;
1005 }
1006 }
1007 assert(BaseDesc);
1008
1009 // Adjust the offset value based on the information from the record layout.
1010 const ASTRecordLayout &Layout = ASTCtx.getASTRecordLayout(D: R->getDecl());
1011 CharUnits BaseLayoutOffset =
1012 Layout.getBaseClassOffset(Base: cast<CXXRecordDecl>(Val: BaseDesc->asDecl()));
1013
1014 return {.Desc: BaseDesc, .Value: Value + BaseLayoutOffset.getQuantity()};
1015}
1016