//===----- EvaluationResult.cpp - Result class for the VM ------*- C++ -*-===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//

#include "EvaluationResult.h"
#include "InterpState.h"
#include "Record.h"
#include "clang/AST/ExprCXX.h"
#include "llvm/ADT/SetVector.h"

namespace clang {
namespace interp {

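// Convert the result to an APValue: LValue results become pointer or
// function-pointer values, RValue results are returned as-is, and a Valid
// (value-less) result becomes an empty APValue.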
APValue EvaluationResult::toAPValue() const {
  assert(!empty());
  switch (Kind) {
  case LValue:
    // Either a pointer or a function pointer.
    if (const auto *P = std::get_if<Pointer>(&Value))
      return P->toAPValue(Ctx->getASTContext());
    else if (const auto *FP = std::get_if<FunctionPointer>(&Value))
      return FP->toAPValue(Ctx->getASTContext());
    else
      llvm_unreachable("Unhandled LValue type");
    break;
  case RValue:
    return std::get<APValue>(Value);
  case Valid:
    return APValue();
  default:
    llvm_unreachable("Unhandled result kind?");
  }
}

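// Convert the result to an rvalue APValue, performing an lvalue-to-rvalue
// conversion if the result is an LValue.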
std::optional<APValue> EvaluationResult::toRValue() const {
  if (Kind == RValue)
    return toAPValue();

  assert(Kind == LValue);

  // We have a pointer and want an RValue.
  if (const auto *P = std::get_if<Pointer>(&Value))
    return P->toRValue(*Ctx, getSourceType());
  else if (const auto *FP = std::get_if<FunctionPointer>(&Value))
    // Function pointers can't be converted to an rvalue; return their
    // APValue representation directly.
    return FP->toAPValue(Ctx->getASTContext());
  llvm_unreachable("Unhandled lvalue kind");
}

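// Emit a diagnostic that the given subobject is uninitialized, along with a
// note pointing at its declaration.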
static void DiagnoseUninitializedSubobject(InterpState &S, SourceLocation Loc,
                                           const FieldDecl *SubObjDecl) {
  assert(SubObjDecl && "Subobject declaration does not exist");
  S.FFDiag(Loc, diag::note_constexpr_uninitialized)
      << /*(name)*/ 1 << SubObjDecl;
  S.Note(SubObjDecl->getLocation(),
         diag::note_constexpr_subobject_declared_here);
}

static bool CheckFieldsInitialized(InterpState &S, SourceLocation Loc,
                                   const Pointer &BasePtr, const Record *R);

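// Check that all elements of the given constant array are fully initialized,
// recursing into record and nested array elements.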
static bool CheckArrayInitialized(InterpState &S, SourceLocation Loc,
                                  const Pointer &BasePtr,
                                  const ConstantArrayType *CAT) {
  bool Result = true;
  size_t NumElems = CAT->getZExtSize();
  QualType ElemType = CAT->getElementType();

  if (ElemType->isRecordType()) {
    const Record *R = BasePtr.getElemRecord();
    for (size_t I = 0; I != NumElems; ++I) {
      Pointer ElemPtr = BasePtr.atIndex(I).narrow();
      Result &= CheckFieldsInitialized(S, Loc, ElemPtr, R);
    }
  } else if (const auto *ElemCAT = dyn_cast<ConstantArrayType>(ElemType)) {
    for (size_t I = 0; I != NumElems; ++I) {
      Pointer ElemPtr = BasePtr.atIndex(I).narrow();
      Result &= CheckArrayInitialized(S, Loc, ElemPtr, ElemCAT);
    }
  } else {
    for (size_t I = 0; I != NumElems; ++I) {
      if (!BasePtr.atIndex(I).isInitialized()) {
        DiagnoseUninitializedSubobject(S, Loc, BasePtr.getField());
        Result = false;
      }
    }
  }

  return Result;
}

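// Check that all fields and base class subobjects of the record pointed to
// by BasePtr are initialized, diagnosing uninitialized subobjects.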
static bool CheckFieldsInitialized(InterpState &S, SourceLocation Loc,
                                   const Pointer &BasePtr, const Record *R) {
  assert(R);
  bool Result = true;
  // Check that all fields of this record are initialized.
  for (const Record::Field &F : R->fields()) {
    Pointer FieldPtr = BasePtr.atField(F.Offset);
    QualType FieldType = F.Decl->getType();

    // Don't check inactive union members.
    if (R->isUnion() && !FieldPtr.isActive())
      continue;

    if (FieldType->isRecordType()) {
      Result &= CheckFieldsInitialized(S, Loc, FieldPtr, FieldPtr.getRecord());
    } else if (FieldType->isIncompleteArrayType()) {
      // Nothing to do here.
    } else if (F.Decl->isUnnamedBitField()) {
      // Nothing to do here.
    } else if (FieldType->isArrayType()) {
      const auto *CAT =
          cast<ConstantArrayType>(FieldType->getAsArrayTypeUnsafe());
      Result &= CheckArrayInitialized(S, Loc, FieldPtr, CAT);
    } else if (!FieldPtr.isInitialized()) {
      DiagnoseUninitializedSubobject(S, Loc, F.Decl);
      Result = false;
    }
  }

  // Check the fields of all bases.
  for (const Record::Base &B : R->bases()) {
    Pointer P = BasePtr.atField(B.Offset);
    if (!P.isInitialized()) {
      const Descriptor *Desc = BasePtr.getDeclDesc();
      if (Desc->asDecl())
        S.FFDiag(BasePtr.getDeclDesc()->asDecl()->getLocation(),
                 diag::note_constexpr_uninitialized_base)
            << B.Desc->getType();
      else
        S.FFDiag(BasePtr.getDeclDesc()->asExpr()->getExprLoc(),
                 diag::note_constexpr_uninitialized_base)
            << B.Desc->getType();

      return false;
    }
    Result &= CheckFieldsInitialized(S, Loc, P, B.R);
  }

  // TODO: Virtual bases.

  return Result;
}

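// Check that the object Ptr points to is fully initialized. Null and dead
// pointers are not inspected here and are treated as initialized.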
bool EvaluationResult::checkFullyInitialized(InterpState &S,
                                             const Pointer &Ptr) const {
  assert(Source);
  assert(empty());

  if (Ptr.isZero())
    return true;

  // We can't inspect dead pointers at all. Return true here so we can
  // diagnose them later.
  if (!Ptr.isLive())
    return true;

  SourceLocation InitLoc;
  if (const auto *D = Source.dyn_cast<const Decl *>())
    InitLoc = cast<VarDecl>(D)->getAnyInitializer()->getExprLoc();
  else if (const auto *E = Source.dyn_cast<const Expr *>())
    InitLoc = E->getExprLoc();

  if (const Record *R = Ptr.getRecord())
    return CheckFieldsInitialized(S, InitLoc, Ptr, R);

  if (const auto *CAT = dyn_cast_if_present<ConstantArrayType>(
          Ptr.getType()->getAsArrayTypeUnsafe()))
    return CheckArrayInitialized(S, InitLoc, Ptr, CAT);

  return true;
}

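// Collect all blocks reachable from Ptr, following record fields, pointer
// values, and array elements transitively. Used by checkReturnValue() to
// find dynamic allocations reachable from the returned value.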
static void collectBlocks(const Pointer &Ptr,
                          llvm::SetVector<const Block *> &Blocks) {
  auto isUsefulPtr = [](const Pointer &P) -> bool {
    return P.isLive() && !P.isZero() && !P.isDummy() &&
           !P.isUnknownSizeArray() && !P.isOnePastEnd() && P.isBlockPointer();
  };

  if (!isUsefulPtr(Ptr))
    return;

  Blocks.insert(Ptr.block());

  const Descriptor *Desc = Ptr.getFieldDesc();
  if (!Desc)
    return;

  if (const Record *R = Desc->ElemRecord) {
    for (const Record::Field &F : R->fields()) {
      const Pointer &FieldPtr = Ptr.atField(F.Offset);
      assert(FieldPtr.block() == Ptr.block());
      collectBlocks(FieldPtr, Blocks);
    }
  } else if (Desc->isPrimitive() && Desc->getPrimType() == PT_Ptr) {
    const Pointer &Pointee = Ptr.deref<Pointer>();
    if (isUsefulPtr(Pointee) && !Blocks.contains(Pointee.block()))
      collectBlocks(Pointee, Blocks);

  } else if (Desc->isPrimitiveArray() && Desc->getPrimType() == PT_Ptr) {
    for (unsigned I = 0; I != Desc->getNumElems(); ++I) {
      const Pointer &ElemPointee = Ptr.atIndex(I).deref<Pointer>();
      if (isUsefulPtr(ElemPointee) && !Blocks.contains(ElemPointee.block()))
        collectBlocks(ElemPointee, Blocks);
    }
  } else if (Desc->isCompositeArray()) {
    for (unsigned I = 0; I != Desc->getNumElems(); ++I) {
      const Pointer &ElemPtr = Ptr.atIndex(I).narrow();
      collectBlocks(ElemPtr, Blocks);
    }
  }
}

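// Check that the returned value does not (transitively) point to a
// dynamically allocated block, since a pointer or reference to a
// heap-allocated object is not a constant expression.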
bool EvaluationResult::checkReturnValue(InterpState &S, const Context &Ctx,
                                        const Pointer &Ptr,
                                        const SourceInfo &Info) {
  // Collect all blocks that this pointer (transitively) points to and
  // return false if any of them is a dynamic block.
  llvm::SetVector<const Block *> Blocks;

  collectBlocks(Ptr, Blocks);

  for (const Block *B : Blocks) {
    if (B->isDynamic()) {
      assert(B->getDescriptor());
      assert(B->getDescriptor()->asExpr());

      S.FFDiag(Info, diag::note_constexpr_dynamic_alloc)
          << Ptr.getType()->isReferenceType() << !Ptr.isRoot();
      S.Note(B->getDescriptor()->asExpr()->getExprLoc(),
             diag::note_constexpr_dynamic_alloc_here);
      return false;
    }
  }

  return true;
}

} // namespace interp
} // namespace clang