1 | //===--- Context.cpp - Context for the constexpr VM -------------*- C++ -*-===// |
2 | // |
3 | // Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions. |
4 | // See https://llvm.org/LICENSE.txt for license information. |
5 | // SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception |
6 | // |
7 | //===----------------------------------------------------------------------===// |
8 | |
9 | #include "Context.h" |
10 | #include "ByteCodeEmitter.h" |
11 | #include "Compiler.h" |
12 | #include "EvalEmitter.h" |
13 | #include "Interp.h" |
14 | #include "InterpFrame.h" |
15 | #include "InterpStack.h" |
16 | #include "PrimType.h" |
17 | #include "Program.h" |
18 | #include "clang/AST/Expr.h" |
19 | #include "clang/Basic/TargetInfo.h" |
20 | |
21 | using namespace clang; |
22 | using namespace clang::interp; |
23 | |
24 | Context::Context(ASTContext &Ctx) : Ctx(Ctx), P(new Program(*this)) { |
25 | this->ShortWidth = Ctx.getTargetInfo().getShortWidth(); |
26 | this->IntWidth = Ctx.getTargetInfo().getIntWidth(); |
27 | this->LongWidth = Ctx.getTargetInfo().getLongWidth(); |
28 | this->LongLongWidth = Ctx.getTargetInfo().getLongLongWidth(); |
29 | assert(Ctx.getTargetInfo().getCharWidth() == 8 && |
30 | "We're assuming 8 bit chars" ); |
31 | } |
32 | |
33 | Context::~Context() {} |
34 | |
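/// Checks whether FD can ever be evaluated as a constant expression: the
/// function is compiled and interpreted once, and the check succeeds if the
/// resulting bytecode function is valid.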
35 | bool Context::isPotentialConstantExpr(State &Parent, const FunctionDecl *FD) { |
36 | assert(Stk.empty()); |
37 | |
38 | // Get a function handle. |
  const Function *Func = getOrCreateFunction(FD);
40 | if (!Func) |
41 | return false; |
42 | |
43 | // Compile the function. |
  Compiler<ByteCodeEmitter>(*this, *P).compileFunc(
      FD, const_cast<Function *>(Func));
46 | |
47 | ++EvalID; |
48 | // And run it. |
49 | if (!Run(Parent, Func)) |
50 | return false; |
51 | |
52 | return Func->isValid(); |
53 | } |
54 | |
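/// Evaluates E as an rvalue, converting a glvalue result if necessary.
/// Evaluation may be entered recursively; the interpreter stack is only
/// expected to be empty again at the outermost level.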
55 | bool Context::evaluateAsRValue(State &Parent, const Expr *E, APValue &Result) { |
56 | ++EvalID; |
57 | bool Recursing = !Stk.empty(); |
58 | size_t StackSizeBefore = Stk.size(); |
59 | Compiler<EvalEmitter> C(*this, *P, Parent, Stk); |
60 | |
61 | auto Res = C.interpretExpr(E, /*ConvertResultToRValue=*/E->isGLValue()); |
62 | |
63 | if (Res.isInvalid()) { |
64 | C.cleanup(); |
    Stk.clearTo(StackSizeBefore);
66 | return false; |
67 | } |
68 | |
69 | if (!Recursing) { |
70 | assert(Stk.empty()); |
71 | C.cleanup(); |
72 | #ifndef NDEBUG |
73 | // Make sure we don't rely on some value being still alive in |
74 | // InterpStack memory. |
75 | Stk.clearTo(StackSizeBefore); |
76 | #endif |
77 | } |
78 | |
79 | Result = Res.toAPValue(); |
80 | |
81 | return true; |
82 | } |
83 | |
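/// Evaluates E as a constant expression of the given kind. Unlike
/// evaluateAsRValue(), the result is not converted to an rvalue, and the
/// top-level scope is destroyed as part of interpretation.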
84 | bool Context::evaluate(State &Parent, const Expr *E, APValue &Result, |
85 | ConstantExprKind Kind) { |
86 | ++EvalID; |
87 | bool Recursing = !Stk.empty(); |
88 | size_t StackSizeBefore = Stk.size(); |
89 | Compiler<EvalEmitter> C(*this, *P, Parent, Stk); |
90 | |
91 | auto Res = C.interpretExpr(E, /*ConvertResultToRValue=*/false, |
92 | /*DestroyToplevelScope=*/true); |
93 | if (Res.isInvalid()) { |
94 | C.cleanup(); |
    Stk.clearTo(StackSizeBefore);
96 | return false; |
97 | } |
98 | |
99 | if (!Recursing) { |
100 | assert(Stk.empty()); |
101 | C.cleanup(); |
102 | #ifndef NDEBUG |
103 | // Make sure we don't rely on some value being still alive in |
104 | // InterpStack memory. |
105 | Stk.clearTo(StackSizeBefore); |
106 | #endif |
107 | } |
108 | |
109 | Result = Res.toAPValue(); |
110 | return true; |
111 | } |
112 | |
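/// Evaluates the initializer of VD. For globally indexed variables of record
/// or array type, additionally checks that the result is fully initialized.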
113 | bool Context::evaluateAsInitializer(State &Parent, const VarDecl *VD, |
114 | APValue &Result) { |
115 | ++EvalID; |
116 | bool Recursing = !Stk.empty(); |
117 | size_t StackSizeBefore = Stk.size(); |
118 | Compiler<EvalEmitter> C(*this, *P, Parent, Stk); |
119 | |
120 | bool CheckGlobalInitialized = |
121 | shouldBeGloballyIndexed(VD) && |
122 | (VD->getType()->isRecordType() || VD->getType()->isArrayType()); |
  auto Res = C.interpretDecl(VD, CheckGlobalInitialized);
124 | if (Res.isInvalid()) { |
125 | C.cleanup(); |
    Stk.clearTo(StackSizeBefore);
127 | |
128 | return false; |
129 | } |
130 | |
131 | if (!Recursing) { |
132 | assert(Stk.empty()); |
133 | C.cleanup(); |
134 | #ifndef NDEBUG |
135 | // Make sure we don't rely on some value being still alive in |
136 | // InterpStack memory. |
137 | Stk.clearTo(StackSizeBefore); |
138 | #endif |
139 | } |
140 | |
141 | Result = Res.toAPValue(); |
142 | return true; |
143 | } |
144 | |
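/// Common implementation for the evaluateCharRange() overloads below:
/// evaluates SizeExpr to an integer, interprets PtrExpr as a pointer into a
/// primitive char array, and copies up to Size elements into Result, which is
/// either an APValue array or a std::string.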
145 | template <typename ResultT> |
146 | bool Context::evaluateStringRepr(State &Parent, const Expr *SizeExpr, |
147 | const Expr *PtrExpr, ResultT &Result) { |
148 | assert(Stk.empty()); |
149 | Compiler<EvalEmitter> C(*this, *P, Parent, Stk); |
150 | |
151 | // Evaluate size value. |
152 | APValue SizeValue; |
  if (!evaluateAsRValue(Parent, SizeExpr, SizeValue))
154 | return false; |
155 | |
156 | if (!SizeValue.isInt()) |
157 | return false; |
158 | uint64_t Size = SizeValue.getInt().getZExtValue(); |
159 | |
  auto PtrRes = C.interpretAsPointer(PtrExpr, [&](const Pointer &Ptr) {
161 | if (Size == 0) { |
162 | if constexpr (std::is_same_v<ResultT, APValue>) |
163 | Result = APValue(APValue::UninitArray{}, 0, 0); |
164 | return true; |
165 | } |
166 | |
167 | if (!Ptr.isLive() || !Ptr.getFieldDesc()->isPrimitiveArray()) |
168 | return false; |
169 | |
170 | // Must be char. |
171 | if (Ptr.getFieldDesc()->getElemSize() != 1 /*bytes*/) |
172 | return false; |
173 | |
174 | if (Size > Ptr.getNumElems()) { |
      Parent.FFDiag(SizeExpr, diag::note_constexpr_access_past_end) << AK_Read;
176 | Size = Ptr.getNumElems(); |
177 | } |
178 | |
179 | if constexpr (std::is_same_v<ResultT, APValue>) { |
180 | QualType CharTy = PtrExpr->getType()->getPointeeType(); |
181 | Result = APValue(APValue::UninitArray{}, Size, Size); |
182 | for (uint64_t I = 0; I != Size; ++I) { |
183 | if (std::optional<APValue> ElemVal = |
                Ptr.atIndex(I).toRValue(*this, CharTy))
185 | Result.getArrayInitializedElt(I) = *ElemVal; |
186 | else |
187 | return false; |
188 | } |
189 | } else { |
190 | assert((std::is_same_v<ResultT, std::string>)); |
191 | if (Size < Result.max_size()) |
192 | Result.resize(Size); |
193 | Result.assign(reinterpret_cast<const char *>(Ptr.getRawAddress()), Size); |
194 | } |
195 | |
196 | return true; |
197 | }); |
198 | |
199 | if (PtrRes.isInvalid()) { |
200 | C.cleanup(); |
201 | Stk.clear(); |
202 | return false; |
203 | } |
204 | |
205 | return true; |
206 | } |
207 | |
208 | bool Context::evaluateCharRange(State &Parent, const Expr *SizeExpr, |
209 | const Expr *PtrExpr, APValue &Result) { |
210 | assert(SizeExpr); |
211 | assert(PtrExpr); |
212 | |
213 | return evaluateStringRepr(Parent, SizeExpr, PtrExpr, Result); |
214 | } |
215 | |
216 | bool Context::evaluateCharRange(State &Parent, const Expr *SizeExpr, |
217 | const Expr *PtrExpr, std::string &Result) { |
218 | assert(SizeExpr); |
219 | assert(PtrExpr); |
220 | |
221 | return evaluateStringRepr(Parent, SizeExpr, PtrExpr, Result); |
222 | } |
223 | |
224 | const LangOptions &Context::getLangOpts() const { return Ctx.getLangOpts(); } |
225 | |
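/// Maps a bit width to the corresponding signed primitive type; widths
/// without a fixed-size representation fall back to the arbitrary-precision
/// PT_IntAPS. The unsigned variant follows below.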
226 | static PrimType integralTypeToPrimTypeS(unsigned BitWidth) { |
227 | switch (BitWidth) { |
228 | case 64: |
229 | return PT_Sint64; |
230 | case 32: |
231 | return PT_Sint32; |
232 | case 16: |
233 | return PT_Sint16; |
234 | case 8: |
235 | return PT_Sint8; |
236 | default: |
237 | return PT_IntAPS; |
238 | } |
239 | llvm_unreachable("Unhandled BitWidth" ); |
240 | } |
241 | |
242 | static PrimType integralTypeToPrimTypeU(unsigned BitWidth) { |
243 | switch (BitWidth) { |
244 | case 64: |
245 | return PT_Uint64; |
246 | case 32: |
247 | return PT_Uint32; |
248 | case 16: |
249 | return PT_Uint16; |
250 | case 8: |
251 | return PT_Uint8; |
252 | default: |
253 | return PT_IntAP; |
254 | } |
255 | llvm_unreachable("Unhandled BitWidth" ); |
256 | } |
257 | |
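/// Classifies a QualType as one of the interpreter's primitive types, or
/// std::nullopt for types (e.g. vectors and complex types) that are not
/// represented by a PrimType.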
258 | std::optional<PrimType> Context::classify(QualType T) const { |
259 | |
  if (const auto *BT = dyn_cast<BuiltinType>(T.getCanonicalType())) {
261 | auto Kind = BT->getKind(); |
262 | if (Kind == BuiltinType::Bool) |
263 | return PT_Bool; |
264 | if (Kind == BuiltinType::NullPtr) |
265 | return PT_Ptr; |
266 | if (Kind == BuiltinType::BoundMember) |
267 | return PT_MemberPtr; |
268 | |
269 | // Just trying to avoid the ASTContext::getIntWidth call below. |
270 | if (Kind == BuiltinType::Short) |
      return integralTypeToPrimTypeS(this->ShortWidth);
    if (Kind == BuiltinType::UShort)
      return integralTypeToPrimTypeU(this->ShortWidth);

    if (Kind == BuiltinType::Int)
      return integralTypeToPrimTypeS(this->IntWidth);
    if (Kind == BuiltinType::UInt)
      return integralTypeToPrimTypeU(this->IntWidth);
    if (Kind == BuiltinType::Long)
      return integralTypeToPrimTypeS(this->LongWidth);
    if (Kind == BuiltinType::ULong)
      return integralTypeToPrimTypeU(this->LongWidth);
    if (Kind == BuiltinType::LongLong)
      return integralTypeToPrimTypeS(this->LongLongWidth);
    if (Kind == BuiltinType::ULongLong)
      return integralTypeToPrimTypeU(this->LongLongWidth);

    if (Kind == BuiltinType::SChar || Kind == BuiltinType::Char_S)
      return integralTypeToPrimTypeS(8);
    if (Kind == BuiltinType::UChar || Kind == BuiltinType::Char_U ||
        Kind == BuiltinType::Char8)
      return integralTypeToPrimTypeU(8);
293 | |
294 | if (BT->isSignedInteger()) |
      return integralTypeToPrimTypeS(Ctx.getIntWidth(T));
    if (BT->isUnsignedInteger())
      return integralTypeToPrimTypeU(Ctx.getIntWidth(T));
298 | |
299 | if (BT->isFloatingPoint()) |
300 | return PT_Float; |
301 | } |
302 | |
303 | if (T->isPointerOrReferenceType()) |
304 | return PT_Ptr; |
305 | |
306 | if (T->isMemberPointerType()) |
307 | return PT_MemberPtr; |
308 | |
309 | if (const auto *BT = T->getAs<BitIntType>()) { |
310 | if (BT->isSigned()) |
      return integralTypeToPrimTypeS(BT->getNumBits());
    return integralTypeToPrimTypeU(BT->getNumBits());
313 | } |
314 | |
315 | if (const auto *ET = T->getAs<EnumType>()) { |
316 | const auto *D = ET->getDecl(); |
317 | if (!D->isComplete()) |
318 | return std::nullopt; |
    return classify(D->getIntegerType());
320 | } |
321 | |
322 | if (const auto *AT = T->getAs<AtomicType>()) |
    return classify(AT->getValueType());
324 | |
  if (const auto *DT = dyn_cast<DecltypeType>(T))
    return classify(DT->getUnderlyingType());
327 | |
328 | if (T->isObjCObjectPointerType() || T->isBlockPointerType()) |
329 | return PT_Ptr; |
330 | |
331 | if (T->isFixedPointType()) |
332 | return PT_FixedPoint; |
333 | |
334 | // Vector and complex types get here. |
335 | return std::nullopt; |
336 | } |
337 | |
338 | unsigned Context::getCharBit() const { |
339 | return Ctx.getTargetInfo().getCharWidth(); |
340 | } |
341 | |
342 | /// Simple wrapper around getFloatTypeSemantics() to make code a |
343 | /// little shorter. |
344 | const llvm::fltSemantics &Context::getFloatSemantics(QualType T) const { |
345 | return Ctx.getFloatTypeSemantics(T); |
346 | } |
347 | |
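/// Interprets Func with a fresh InterpState. On failure, the stack is cleared
/// only after the InterpState has been destroyed (see the comment in the
/// body).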
348 | bool Context::Run(State &Parent, const Function *Func) { |
349 | |
350 | { |
351 | InterpState State(Parent, *P, Stk, *this, Func); |
    if (Interpret(State)) {
353 | assert(Stk.empty()); |
354 | return true; |
355 | } |
356 | // State gets destroyed here, so the Stk.clear() below doesn't accidentally |
357 | // remove values the State's destructor might access. |
358 | } |
359 | |
360 | Stk.clear(); |
361 | return false; |
362 | } |
363 | |
364 | // TODO: Virtual bases? |
365 | const CXXMethodDecl * |
366 | Context::getOverridingFunction(const CXXRecordDecl *DynamicDecl, |
367 | const CXXRecordDecl *StaticDecl, |
368 | const CXXMethodDecl *InitialFunction) const { |
369 | assert(DynamicDecl); |
370 | assert(StaticDecl); |
371 | assert(InitialFunction); |
372 | |
373 | const CXXRecordDecl *CurRecord = DynamicDecl; |
374 | const CXXMethodDecl *FoundFunction = InitialFunction; |
375 | for (;;) { |
376 | const CXXMethodDecl *Overrider = |
        FoundFunction->getCorrespondingMethodDeclaredInClass(CurRecord, false);
378 | if (Overrider) |
379 | return Overrider; |
380 | |
381 | // Common case of only one base class. |
382 | if (CurRecord->getNumBases() == 1) { |
383 | CurRecord = CurRecord->bases_begin()->getType()->getAsCXXRecordDecl(); |
384 | continue; |
385 | } |
386 | |
387 | // Otherwise, go to the base class that will lead to the StaticDecl. |
388 | for (const CXXBaseSpecifier &Spec : CurRecord->bases()) { |
389 | const CXXRecordDecl *Base = Spec.getType()->getAsCXXRecordDecl(); |
      if (Base == StaticDecl || Base->isDerivedFrom(StaticDecl)) {
391 | CurRecord = Base; |
392 | break; |
393 | } |
394 | } |
395 | } |
396 | |
397 | llvm_unreachable( |
398 | "Couldn't find an overriding function in the class hierarchy?" ); |
399 | return nullptr; |
400 | } |
401 | |
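/// Returns the Function for FuncDecl, creating it if necessary. This sets up
/// the parameter layout (optional RVO pointer, optional 'this' pointer, then
/// the declared parameters); the body is compiled separately (see e.g.
/// isPotentialConstantExpr() above).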
402 | const Function *Context::getOrCreateFunction(const FunctionDecl *FuncDecl) { |
403 | assert(FuncDecl); |
404 | FuncDecl = FuncDecl->getMostRecentDecl(); |
405 | |
  if (const Function *Func = P->getFunction(FuncDecl))
407 | return Func; |
408 | |
409 | // Manually created functions that haven't been assigned proper |
410 | // parameters yet. |
411 | if (!FuncDecl->param_empty() && !FuncDecl->param_begin()) |
412 | return nullptr; |
413 | |
414 | bool IsLambdaStaticInvoker = false; |
  if (const auto *MD = dyn_cast<CXXMethodDecl>(FuncDecl);
416 | MD && MD->isLambdaStaticInvoker()) { |
    // For a lambda static invoker, we might have to pick a specialized
    // version if the lambda is generic. In that case, the picked function
    // will *NOT* be a static invoker anymore. However, it will still
    // be a non-static member function, which (usually) requires an
    // instance pointer. We suppress that later in this function.
422 | IsLambdaStaticInvoker = true; |
423 | |
424 | const CXXRecordDecl *ClosureClass = MD->getParent(); |
425 | assert(ClosureClass->captures_begin() == ClosureClass->captures_end()); |
426 | if (ClosureClass->isGenericLambda()) { |
427 | const CXXMethodDecl *LambdaCallOp = ClosureClass->getLambdaCallOperator(); |
428 | assert(MD->isFunctionTemplateSpecialization() && |
429 | "A generic lambda's static-invoker function must be a " |
430 | "template specialization" ); |
431 | const TemplateArgumentList *TAL = MD->getTemplateSpecializationArgs(); |
432 | FunctionTemplateDecl *CallOpTemplate = |
433 | LambdaCallOp->getDescribedFunctionTemplate(); |
434 | void *InsertPos = nullptr; |
435 | const FunctionDecl *CorrespondingCallOpSpecialization = |
        CallOpTemplate->findSpecialization(TAL->asArray(), InsertPos);
437 | assert(CorrespondingCallOpSpecialization); |
438 | FuncDecl = CorrespondingCallOpSpecialization; |
439 | } |
440 | } |
441 | // Set up argument indices. |
442 | unsigned ParamOffset = 0; |
443 | SmallVector<PrimType, 8> ParamTypes; |
444 | SmallVector<unsigned, 8> ParamOffsets; |
445 | llvm::DenseMap<unsigned, Function::ParamDescriptor> ParamDescriptors; |
446 | |
  // If the return type is not a primitive, a pointer to the storage where
  // the return value will be initialized is passed as the first argument.
  // See 'RVO' elsewhere in the code.
450 | QualType Ty = FuncDecl->getReturnType(); |
451 | bool HasRVO = false; |
  if (!Ty->isVoidType() && !classify(Ty)) {
    HasRVO = true;
    ParamTypes.push_back(PT_Ptr);
    ParamOffsets.push_back(ParamOffset);
    ParamOffset += align(primSize(PT_Ptr));
457 | } |
458 | |
459 | // If the function decl is a member decl, the next parameter is |
460 | // the 'this' pointer. This parameter is pop()ed from the |
461 | // InterpStack when calling the function. |
462 | bool HasThisPointer = false; |
  if (const auto *MD = dyn_cast<CXXMethodDecl>(FuncDecl)) {
464 | if (!IsLambdaStaticInvoker) { |
465 | HasThisPointer = MD->isInstance(); |
466 | if (MD->isImplicitObjectMemberFunction()) { |
        ParamTypes.push_back(PT_Ptr);
        ParamOffsets.push_back(ParamOffset);
        ParamOffset += align(primSize(PT_Ptr));
470 | } |
471 | } |
472 | |
473 | if (isLambdaCallOperator(MD)) { |
474 | // The parent record needs to be complete, we need to know about all |
475 | // the lambda captures. |
476 | if (!MD->getParent()->isCompleteDefinition()) |
477 | return nullptr; |
478 | llvm::DenseMap<const ValueDecl *, FieldDecl *> LC; |
479 | FieldDecl *LTC; |
480 | |
      MD->getParent()->getCaptureFields(LC, LTC);
482 | |
483 | if (MD->isStatic() && !LC.empty()) { |
484 | // Static lambdas cannot have any captures. If this one does, |
485 | // it has already been diagnosed and we can only ignore it. |
486 | return nullptr; |
487 | } |
488 | } |
489 | } |
490 | |
491 | // Assign descriptors to all parameters. |
492 | // Composite objects are lowered to pointers. |
493 | for (const ParmVarDecl *PD : FuncDecl->parameters()) { |
    std::optional<PrimType> T = classify(PD->getType());
    PrimType PT = T.value_or(PT_Ptr);
    Descriptor *Desc = P->createDescriptor(PD, PT);
    ParamDescriptors.insert({ParamOffset, {PT, Desc}});
    ParamOffsets.push_back(ParamOffset);
    ParamOffset += align(primSize(PT));
    ParamTypes.push_back(PT);
501 | } |
502 | |
503 | // Create a handle over the emitted code. |
504 | assert(!P->getFunction(FuncDecl)); |
  const Function *Func = P->createFunction(
      FuncDecl, ParamOffset, std::move(ParamTypes), std::move(ParamDescriptors),
      std::move(ParamOffsets), HasThisPointer, HasRVO, IsLambdaStaticInvoker);
508 | return Func; |
509 | } |
510 | |
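/// Creates a Function for an ObjC block. Blocks with captures are not
/// supported, and the block's body is currently not compiled at all.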
511 | const Function *Context::getOrCreateObjCBlock(const BlockExpr *E) { |
512 | const BlockDecl *BD = E->getBlockDecl(); |
513 | // Set up argument indices. |
514 | unsigned ParamOffset = 0; |
515 | SmallVector<PrimType, 8> ParamTypes; |
516 | SmallVector<unsigned, 8> ParamOffsets; |
517 | llvm::DenseMap<unsigned, Function::ParamDescriptor> ParamDescriptors; |
518 | |
519 | // Assign descriptors to all parameters. |
520 | // Composite objects are lowered to pointers. |
521 | for (const ParmVarDecl *PD : BD->parameters()) { |
    std::optional<PrimType> T = classify(PD->getType());
    PrimType PT = T.value_or(PT_Ptr);
    Descriptor *Desc = P->createDescriptor(PD, PT);
    ParamDescriptors.insert({ParamOffset, {PT, Desc}});
    ParamOffsets.push_back(ParamOffset);
    ParamOffset += align(primSize(PT));
    ParamTypes.push_back(PT);
529 | } |
530 | |
531 | if (BD->hasCaptures()) |
532 | return nullptr; |
533 | |
534 | // Create a handle over the emitted code. |
535 | Function *Func = |
      P->createFunction(E, ParamOffset, std::move(ParamTypes),
                        std::move(ParamDescriptors), std::move(ParamOffsets),
                        /*HasThisPointer=*/false, /*HasRVO=*/false,
                        /*IsLambdaStaticInvoker=*/false);
540 | |
541 | assert(Func); |
542 | Func->setDefined(true); |
543 | // We don't compile the BlockDecl code at all right now. |
544 | Func->setIsFullyCompiled(true); |
545 | return Func; |
546 | } |
547 | |
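/// Computes the byte offset of BaseDecl within DerivedDecl by walking the
/// chain of base classes and summing the per-level base offsets.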
548 | unsigned Context::collectBaseOffset(const RecordDecl *BaseDecl, |
549 | const RecordDecl *DerivedDecl) const { |
550 | assert(BaseDecl); |
551 | assert(DerivedDecl); |
  const auto *FinalDecl = cast<CXXRecordDecl>(BaseDecl);
  const RecordDecl *CurDecl = DerivedDecl;
  const Record *CurRecord = P->getOrCreateRecord(CurDecl);
555 | assert(CurDecl && FinalDecl); |
556 | |
557 | unsigned OffsetSum = 0; |
558 | for (;;) { |
559 | assert(CurRecord->getNumBases() > 0); |
560 | // One level up |
561 | for (const Record::Base &B : CurRecord->bases()) { |
      const auto *BaseDecl = cast<CXXRecordDecl>(B.Decl);
563 | |
      if (BaseDecl == FinalDecl || BaseDecl->isDerivedFrom(FinalDecl)) {
565 | OffsetSum += B.Offset; |
566 | CurRecord = B.R; |
567 | CurDecl = BaseDecl; |
568 | break; |
569 | } |
570 | } |
571 | if (CurDecl == FinalDecl) |
572 | break; |
573 | } |
574 | |
575 | assert(OffsetSum > 0); |
576 | return OffsetSum; |
577 | } |
578 | |
579 | const Record *Context::getRecord(const RecordDecl *D) const { |
  return P->getOrCreateRecord(D);
581 | } |
582 | |
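/// Returns whether the builtin with the given ID takes its call arguments
/// unevaluated.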
583 | bool Context::isUnevaluatedBuiltin(unsigned ID) { |
584 | return ID == Builtin::BI__builtin_classify_type || |
585 | ID == Builtin::BI__builtin_os_log_format_buffer_size || |
586 | ID == Builtin::BI__builtin_constant_p || ID == Builtin::BI__noop; |
587 | } |
588 | |