//===--- Context.cpp - Context for the constexpr VM -------------*- C++ -*-===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//

#include "Context.h"
#include "Boolean.h"
#include "ByteCodeEmitter.h"
#include "Compiler.h"
#include "EvalEmitter.h"
#include "Integral.h"
#include "InterpFrame.h"
#include "InterpHelpers.h"
#include "InterpStack.h"
#include "Pointer.h"
#include "PrimType.h"
#include "Program.h"
#include "clang/AST/ASTLambda.h"
#include "clang/AST/Expr.h"
#include "clang/Basic/TargetInfo.h"

using namespace clang;
using namespace clang::interp;

Context::Context(ASTContext &Ctx) : Ctx(Ctx), P(new Program(*this)) {
  this->ShortWidth = Ctx.getTargetInfo().getShortWidth();
  this->IntWidth = Ctx.getTargetInfo().getIntWidth();
  this->LongWidth = Ctx.getTargetInfo().getLongWidth();
  this->LongLongWidth = Ctx.getTargetInfo().getLongLongWidth();
  assert(Ctx.getTargetInfo().getCharWidth() == 8 &&
         "We're assuming 8 bit chars");
}

Context::~Context() {}

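// Illustrative only: given
//   constexpr int f(int N) { return 1 / (N - 1); }
// this succeeds, since f(0) can be evaluated as a constant expression even
// though f(1) cannot; a function for which *no* call could ever be a
// constant expression fails here.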
bool Context::isPotentialConstantExpr(State &Parent, const FunctionDecl *FD) {
  assert(Stk.empty());

  // Get a function handle.
  const Function *Func = getOrCreateFunction(FD);
  if (!Func)
    return false;

  // Compile the function.
  Compiler<ByteCodeEmitter>(*this, *P).compileFunc(
      FD, const_cast<Function *>(Func));

  if (!Func->isValid())
    return false;

  ++EvalID;
  // And run it.
  return Run(Parent, Func);
}

void Context::isPotentialConstantExprUnevaluated(State &Parent, const Expr *E,
                                                 const FunctionDecl *FD) {
  assert(Stk.empty());
  ++EvalID;
  size_t StackSizeBefore = Stk.size();
  Compiler<EvalEmitter> C(*this, *P, Parent, Stk);

  if (!C.interpretCall(FD, E)) {
    C.cleanup();
    Stk.clearTo(StackSizeBefore);
  }
}

bool Context::evaluateAsRValue(State &Parent, const Expr *E, APValue &Result) {
  ++EvalID;
  bool Recursing = !Stk.empty();
  size_t StackSizeBefore = Stk.size();
  Compiler<EvalEmitter> C(*this, *P, Parent, Stk);

  auto Res = C.interpretExpr(E, /*ConvertResultToRValue=*/E->isGLValue());

  if (Res.isInvalid()) {
    C.cleanup();
    Stk.clearTo(StackSizeBefore);
    return false;
  }

  if (!Recursing) {
    // We *can* actually get here with a non-empty stack, since
    // things like InterpState::noteSideEffect() exist.
    C.cleanup();
#ifndef NDEBUG
    // Make sure we don't rely on some value still being alive in
    // InterpStack memory.
    Stk.clearTo(StackSizeBefore);
#endif
  }

  Result = Res.stealAPValue();

  return true;
}

bool Context::evaluate(State &Parent, const Expr *E, APValue &Result,
                       ConstantExprKind Kind) {
  ++EvalID;
  bool Recursing = !Stk.empty();
  size_t StackSizeBefore = Stk.size();
  Compiler<EvalEmitter> C(*this, *P, Parent, Stk);

  auto Res = C.interpretExpr(E, /*ConvertResultToRValue=*/false,
                             /*DestroyToplevelScope=*/true);
  if (Res.isInvalid()) {
    C.cleanup();
    Stk.clearTo(StackSizeBefore);
    return false;
  }

  if (!Recursing) {
    assert(Stk.empty());
    C.cleanup();
#ifndef NDEBUG
    // Make sure we don't rely on some value still being alive in
    // InterpStack memory.
    Stk.clearTo(StackSizeBefore);
#endif
  }

  Result = Res.stealAPValue();
  return true;
}

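// Illustrative only: evaluating the initializer of
//   struct S { int a, b; constexpr S() : a(1) {} };
//   constexpr S s;
// must fail because s.b is never initialized. For globally-indexed record
// and array variables we therefore additionally check that the object is
// fully initialized after evaluation.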
bool Context::evaluateAsInitializer(State &Parent, const VarDecl *VD,
                                    const Expr *Init, APValue &Result) {
  ++EvalID;
  bool Recursing = !Stk.empty();
  size_t StackSizeBefore = Stk.size();
  Compiler<EvalEmitter> C(*this, *P, Parent, Stk);

  bool CheckGlobalInitialized =
      shouldBeGloballyIndexed(VD) &&
      (VD->getType()->isRecordType() || VD->getType()->isArrayType());
  auto Res =
      C.interpretDecl(VD, Init, /*CheckFullyInitialized=*/CheckGlobalInitialized);
  if (Res.isInvalid()) {
    C.cleanup();
    Stk.clearTo(StackSizeBefore);
    return false;
  }

  if (!Recursing) {
    assert(Stk.empty());
    C.cleanup();
#ifndef NDEBUG
    // Make sure we don't rely on some value still being alive in
    // InterpStack memory.
    Stk.clearTo(StackSizeBefore);
#endif
  }

  Result = Res.stealAPValue();
  return true;
}

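// Illustrative only (hypothetical caller): for a message object such as
//   struct Msg {
//     constexpr const char *data() const { return "hi"; }
//     constexpr size_t size() const { return 2; }
//   };
// e.g. in a static_assert message, the caller passes the size() call as
// SizeExpr and the data() call as PtrExpr, and the character range is
// reassembled into Result.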
template <typename ResultT>
bool Context::evaluateStringRepr(State &Parent, const Expr *SizeExpr,
                                 const Expr *PtrExpr, ResultT &Result) {
  assert(Stk.empty());
  Compiler<EvalEmitter> C(*this, *P, Parent, Stk);

  // Evaluate the size value.
  APValue SizeValue;
  if (!evaluateAsRValue(Parent, SizeExpr, SizeValue))
    return false;

  if (!SizeValue.isInt())
    return false;
  uint64_t Size = SizeValue.getInt().getZExtValue();

  auto PtrRes = C.interpretAsPointer(PtrExpr, [&](const Pointer &Ptr) {
    if (Size == 0) {
      if constexpr (std::is_same_v<ResultT, APValue>)
        Result = APValue(APValue::UninitArray{}, 0, 0);
      return true;
    }

    if (!Ptr.isLive() || !Ptr.getFieldDesc()->isPrimitiveArray())
      return false;

    // The element type must be char-sized.
    if (Ptr.getFieldDesc()->getElemSize() != 1 /*bytes*/)
      return false;

    if (Size > Ptr.getNumElems()) {
      Parent.FFDiag(SizeExpr, diag::note_constexpr_access_past_end) << AK_Read;
      Size = Ptr.getNumElems();
    }

    if constexpr (std::is_same_v<ResultT, APValue>) {
      QualType CharTy = PtrExpr->getType()->getPointeeType();
      Result = APValue(APValue::UninitArray{}, Size, Size);
      for (uint64_t I = 0; I != Size; ++I) {
        if (std::optional<APValue> ElemVal =
                Ptr.atIndex(I).toRValue(*this, CharTy))
          Result.getArrayInitializedElt(I) = *ElemVal;
        else
          return false;
      }
    } else {
      assert((std::is_same_v<ResultT, std::string>));
      if (Size < Result.max_size())
        Result.resize(Size);
      Result.assign(reinterpret_cast<const char *>(Ptr.getRawAddress()), Size);
    }

    return true;
  });

  if (PtrRes.isInvalid()) {
    C.cleanup();
    Stk.clear();
    return false;
  }

  return true;
}

bool Context::evaluateCharRange(State &Parent, const Expr *SizeExpr,
                                const Expr *PtrExpr, APValue &Result) {
  assert(SizeExpr);
  assert(PtrExpr);

  return evaluateStringRepr(Parent, SizeExpr, PtrExpr, Result);
}

bool Context::evaluateCharRange(State &Parent, const Expr *SizeExpr,
                                const Expr *PtrExpr, std::string &Result) {
  assert(SizeExpr);
  assert(PtrExpr);

  return evaluateStringRepr(Parent, SizeExpr, PtrExpr, Result);
}

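// Illustrative only: for
//   constexpr const char *P = "foo";
// evaluateString() on an expression naming P yields Result == "foo", while
//   constexpr char A[3] = {'f', 'o', 'o'};
// is rejected because the array is not null-terminated.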
bool Context::evaluateString(State &Parent, const Expr *E,
                             std::string &Result) {
  assert(Stk.empty());
  Compiler<EvalEmitter> C(*this, *P, Parent, Stk);

  auto PtrRes = C.interpretAsPointer(E, [&](const Pointer &Ptr) {
    const Descriptor *FieldDesc = Ptr.getFieldDesc();
    if (!FieldDesc->isPrimitiveArray())
      return false;

    if (!Ptr.isConst())
      return false;

    unsigned N = Ptr.getNumElems();

    if (Ptr.elemSize() == 1 /* bytes */) {
      const char *Chars = reinterpret_cast<const char *>(Ptr.getRawAddress());
      unsigned Length = strnlen(Chars, N);
      // Wasn't null-terminated.
      if (N == Length)
        return false;
      Result.assign(Chars, Length);
      return true;
    }

    PrimType ElemT = FieldDesc->getPrimType();
    for (unsigned I = Ptr.getIndex(); I != N; ++I) {
      INT_TYPE_SWITCH(ElemT, {
        auto Elem = Ptr.elem<T>(I);
        if (Elem.isZero())
          return true;
        Result.push_back(static_cast<char>(Elem));
      });
    }
    // We didn't find a 0 byte.
    return false;
  });

  if (PtrRes.isInvalid()) {
    C.cleanup();
    Stk.clear();
    return false;
  }
  return true;
}

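// Illustrative only: for
//   constexpr const char *P = "foo" + 1;
// evaluateStrlen() yields Result == 2; if no terminating null byte is found
// before the end of the array, the evaluation fails instead.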
bool Context::evaluateStrlen(State &Parent, const Expr *E, uint64_t &Result) {
  assert(Stk.empty());
  Compiler<EvalEmitter> C(*this, *P, Parent, Stk);

  auto PtrRes = C.interpretAsPointer(E, [&](const Pointer &Ptr) {
    const Descriptor *FieldDesc = Ptr.getFieldDesc();
    if (!FieldDesc->isPrimitiveArray())
      return false;

    if (Ptr.isDummy() || Ptr.isUnknownSizeArray() || Ptr.isPastEnd())
      return false;

    unsigned N = Ptr.getNumElems();
    if (Ptr.elemSize() == 1) {
      unsigned Size = N - Ptr.getIndex();
      Result =
          strnlen(reinterpret_cast<const char *>(Ptr.getRawAddress()), Size);
      return Result != Size;
    }

    PrimType ElemT = FieldDesc->getPrimType();
    Result = 0;
    for (unsigned I = Ptr.getIndex(); I != N; ++I) {
      INT_TYPE_SWITCH(ElemT, {
        auto Elem = Ptr.elem<T>(I);
        if (Elem.isZero())
          return true;
        ++Result;
      });
    }
    // We didn't find a 0 byte.
    return false;
  });

  if (PtrRes.isInvalid()) {
    C.cleanup();
    Stk.clear();
    return false;
  }
  return true;
}

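// Illustrative only: for
//   char Buf[10];
// __builtin_object_size(&Buf[2], 0) evaluates to 8, the number of bytes
// remaining in the object; Kind corresponds to the builtin's second
// argument.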
bool Context::tryEvaluateObjectSize(State &Parent, const Expr *E, unsigned Kind,
                                    uint64_t &Result) {
  assert(Stk.empty());
  Compiler<EvalEmitter> C(*this, *P, Parent, Stk);

  auto PtrRes = C.interpretAsPointer(E, [&](const Pointer &Ptr) {
    const Descriptor *DeclDesc = Ptr.getDeclDesc();
    if (!DeclDesc)
      return false;

    QualType T = DeclDesc->getType().getNonReferenceType();
    if (T->isIncompleteType() || T->isFunctionType() ||
        !T->isConstantSizeType())
      return false;

    Pointer P = Ptr;
    if (auto ObjectSize =
            evaluateBuiltinObjectSize(getASTContext(), Kind, P)) {
      Result = *ObjectSize;
      return true;
    }
    return false;
  });

  if (PtrRes.isInvalid()) {
    C.cleanup();
    Stk.clear();
    return false;
  }
  return true;
}

const LangOptions &Context::getLangOpts() const { return Ctx.getLangOpts(); }

static PrimType integralTypeToPrimTypeS(unsigned BitWidth) {
  switch (BitWidth) {
  case 64:
    return PT_Sint64;
  case 32:
    return PT_Sint32;
  case 16:
    return PT_Sint16;
  case 8:
    return PT_Sint8;
  default:
    return PT_IntAPS;
  }
  llvm_unreachable("Unhandled BitWidth");
}

static PrimType integralTypeToPrimTypeU(unsigned BitWidth) {
  switch (BitWidth) {
  case 64:
    return PT_Uint64;
  case 32:
    return PT_Uint32;
  case 16:
    return PT_Uint16;
  case 8:
    return PT_Uint8;
  default:
    return PT_IntAP;
  }
  llvm_unreachable("Unhandled BitWidth");
}

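// Illustrative only (widths are target-dependent): on a typical 64-bit
// target, classify(int) == PT_Sint32, classify(bool) == PT_Bool and
// classify(float) == PT_Float, while vector and complex types yield
// std::nullopt and are handled as composites.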
OptPrimType Context::classify(QualType T) const {
  if (const auto *BT = dyn_cast<BuiltinType>(T.getCanonicalType())) {
    auto Kind = BT->getKind();
    if (Kind == BuiltinType::Bool)
      return PT_Bool;
    if (Kind == BuiltinType::NullPtr)
      return PT_Ptr;
    if (Kind == BuiltinType::BoundMember)
      return PT_MemberPtr;

    // Just trying to avoid the ASTContext::getIntWidth call below.
    if (Kind == BuiltinType::Short)
      return integralTypeToPrimTypeS(this->ShortWidth);
    if (Kind == BuiltinType::UShort)
      return integralTypeToPrimTypeU(this->ShortWidth);

    if (Kind == BuiltinType::Int)
      return integralTypeToPrimTypeS(this->IntWidth);
    if (Kind == BuiltinType::UInt)
      return integralTypeToPrimTypeU(this->IntWidth);
    if (Kind == BuiltinType::Long)
      return integralTypeToPrimTypeS(this->LongWidth);
    if (Kind == BuiltinType::ULong)
      return integralTypeToPrimTypeU(this->LongWidth);
    if (Kind == BuiltinType::LongLong)
      return integralTypeToPrimTypeS(this->LongLongWidth);
    if (Kind == BuiltinType::ULongLong)
      return integralTypeToPrimTypeU(this->LongLongWidth);

    if (Kind == BuiltinType::SChar || Kind == BuiltinType::Char_S)
      return integralTypeToPrimTypeS(8);
    if (Kind == BuiltinType::UChar || Kind == BuiltinType::Char_U ||
        Kind == BuiltinType::Char8)
      return integralTypeToPrimTypeU(8);

    if (BT->isSignedInteger())
      return integralTypeToPrimTypeS(Ctx.getIntWidth(T));
    if (BT->isUnsignedInteger())
      return integralTypeToPrimTypeU(Ctx.getIntWidth(T));

    if (BT->isFloatingPoint())
      return PT_Float;
  }

  if (T->isPointerOrReferenceType())
    return PT_Ptr;

  if (T->isMemberPointerType())
    return PT_MemberPtr;

  if (const auto *BT = T->getAs<BitIntType>()) {
    if (BT->isSigned())
      return integralTypeToPrimTypeS(BT->getNumBits());
    return integralTypeToPrimTypeU(BT->getNumBits());
  }

  if (const auto *D = T->getAsEnumDecl()) {
    if (!D->isComplete())
      return std::nullopt;
    return classify(D->getIntegerType());
  }

  if (const auto *AT = T->getAs<AtomicType>())
    return classify(AT->getValueType());

  if (const auto *DT = dyn_cast<DecltypeType>(T))
    return classify(DT->getUnderlyingType());

  if (T->isObjCObjectPointerType() || T->isBlockPointerType())
    return PT_Ptr;

  if (T->isFixedPointType())
    return PT_FixedPoint;

  // Vector and complex types get here.
  return std::nullopt;
}

unsigned Context::getCharBit() const {
  return Ctx.getTargetInfo().getCharWidth();
}

/// Simple wrapper around getFloatTypeSemantics() to make code a
/// little shorter.
const llvm::fltSemantics &Context::getFloatSemantics(QualType T) const {
  return Ctx.getFloatTypeSemantics(T);
}

bool Context::Run(State &Parent, const Function *Func) {
  InterpState State(Parent, *P, Stk, *this, Func);
  if (Interpret(State)) {
    assert(Stk.empty());
    return true;
  }
  Stk.clear();
  return false;
}

// TODO: Virtual bases?
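// Illustrative only: given
//   struct A { virtual constexpr int f() const { return 1; } };
//   struct B : A { constexpr int f() const override { return 2; } };
// a call to f() through an A* whose dynamic type is B resolves, via this
// walk from DynamicDecl towards StaticDecl, to B::f().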
const CXXMethodDecl *
Context::getOverridingFunction(const CXXRecordDecl *DynamicDecl,
                               const CXXRecordDecl *StaticDecl,
                               const CXXMethodDecl *InitialFunction) const {
  assert(DynamicDecl);
  assert(StaticDecl);
  assert(InitialFunction);

  const CXXRecordDecl *CurRecord = DynamicDecl;
  const CXXMethodDecl *FoundFunction = InitialFunction;
  for (;;) {
    const CXXMethodDecl *Overrider =
        FoundFunction->getCorrespondingMethodDeclaredInClass(
            CurRecord, /*MayBeBase=*/false);
    if (Overrider)
      return Overrider;

    // Common case of only one base class.
    if (CurRecord->getNumBases() == 1) {
      CurRecord = CurRecord->bases_begin()->getType()->getAsCXXRecordDecl();
      continue;
    }

    // Otherwise, go to the base class that will lead to the StaticDecl.
    for (const CXXBaseSpecifier &Spec : CurRecord->bases()) {
      const CXXRecordDecl *Base = Spec.getType()->getAsCXXRecordDecl();
      if (Base == StaticDecl || Base->isDerivedFrom(StaticDecl)) {
        CurRecord = Base;
        break;
      }
    }
  }

  llvm_unreachable(
      "Couldn't find an overriding function in the class hierarchy?");
  return nullptr;
}

const Function *Context::getOrCreateFunction(const FunctionDecl *FuncDecl) {
  assert(FuncDecl);
  if (const Function *Func = P->getFunction(FuncDecl))
    return Func;

  // Manually created functions that haven't been assigned proper
  // parameters yet.
  if (!FuncDecl->param_empty() && !FuncDecl->param_begin())
    return nullptr;

  bool IsLambdaStaticInvoker = false;
  if (const auto *MD = dyn_cast<CXXMethodDecl>(FuncDecl);
      MD && MD->isLambdaStaticInvoker()) {
    // For a lambda static invoker, we might have to pick a specialized
    // version if the lambda is generic. In that case, the picked function
    // will *NOT* be a static invoker anymore. However, it will still
    // be a non-static member function, thus (usually) requiring an
    // instance pointer. We suppress that later in this function.
    IsLambdaStaticInvoker = true;
  }
  // Set up argument indices.
  unsigned ParamOffset = 0;
  llvm::SmallVector<Function::ParamDescriptor> ParamDescriptors;

  // If the return value is not a primitive, a pointer to the storage where
  // it is initialized is passed as the first argument. See 'RVO'
  // elsewhere in the code.
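  // Illustrative only: for a hypothetical 'constexpr Pair makePair();'
  // returning a composite Pair, the caller pushes a pointer to the result
  // storage first, so the parameter area is laid out as
  //   [RVO pointer][instance pointer, if any][declared parameters...]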
  QualType Ty = FuncDecl->getReturnType();
  bool HasRVO = false;
  if (!Ty->isVoidType() && !canClassify(Ty)) {
    HasRVO = true;
    ParamDescriptors.emplace_back(nullptr, ParamOffset, PT_Ptr);
    ParamOffset += align(primSize(PT_Ptr));
  }

  // If the function decl is a member decl, the next parameter is
  // the 'this' pointer. This parameter is pop()ed from the
  // InterpStack when calling the function.
  bool HasThisPointer = false;
  if (const auto *MD = dyn_cast<CXXMethodDecl>(FuncDecl)) {
    if (!IsLambdaStaticInvoker) {
      HasThisPointer = MD->isInstance();
      if (MD->isImplicitObjectMemberFunction()) {
        ParamDescriptors.emplace_back(nullptr, ParamOffset, PT_Ptr);
        ParamOffset += align(primSize(PT_Ptr));
      }
    }

    if (isLambdaCallOperator(MD)) {
      // The parent record needs to be complete; we need to know about all
      // the lambda captures.
      if (!MD->getParent()->isCompleteDefinition())
        return nullptr;
      if (MD->isStatic()) {
        llvm::DenseMap<const ValueDecl *, FieldDecl *> LC;
        FieldDecl *LTC;

        MD->getParent()->getCaptureFields(LC, LTC);
        // Static lambdas cannot have any captures. If this one does,
        // it has already been diagnosed and we can only ignore it.
        if (!LC.empty())
          return nullptr;
      }
    }
  }

  // Assign descriptors to all parameters.
  // Composite objects are lowered to pointers.
  const auto *FuncProto = FuncDecl->getType()->getAs<FunctionProtoType>();
  for (auto [ParamIndex, PD] : llvm::enumerate(FuncDecl->parameters())) {
    bool IsConst = PD->getType().isConstQualified();
    bool IsVolatile = PD->getType().isVolatileQualified();

    if (!getASTContext().hasSameType(PD->getType(),
                                     FuncProto->getParamType(ParamIndex)))
      return nullptr;

    OptPrimType T = classify(PD->getType());
    PrimType PT = T.value_or(PT_Ptr);
    Descriptor *Desc = P->createDescriptor(PD, PT, nullptr, std::nullopt,
                                           IsConst, /*IsTemporary=*/false,
                                           /*IsMutable=*/false, IsVolatile);
    ParamDescriptors.emplace_back(Desc, ParamOffset, PT);
    ParamOffset += align(primSize(PT));
  }

  // Create a handle over the emitted code.
  assert(!P->getFunction(FuncDecl));
  const Function *Func =
      P->createFunction(FuncDecl, ParamOffset, std::move(ParamDescriptors),
                        HasThisPointer, HasRVO, IsLambdaStaticInvoker);
  return Func;
}

const Function *Context::getOrCreateObjCBlock(const BlockExpr *E) {
  const BlockDecl *BD = E->getBlockDecl();
  // Set up argument indices.
  unsigned ParamOffset = 0;
  llvm::SmallVector<Function::ParamDescriptor> ParamDescriptors;

  // Assign descriptors to all parameters.
  // Composite objects are lowered to pointers.
  for (const ParmVarDecl *PD : BD->parameters()) {
    bool IsConst = PD->getType().isConstQualified();
    bool IsVolatile = PD->getType().isVolatileQualified();

    OptPrimType T = classify(PD->getType());
    PrimType PT = T.value_or(PT_Ptr);
    Descriptor *Desc = P->createDescriptor(PD, PT, nullptr, std::nullopt,
                                           IsConst, /*IsTemporary=*/false,
                                           /*IsMutable=*/false, IsVolatile);
    ParamDescriptors.emplace_back(Desc, ParamOffset, PT);
    ParamOffset += align(primSize(PT));
  }

  if (BD->hasCaptures())
    return nullptr;

  // Create a handle over the emitted code.
  Function *Func =
      P->createFunction(E, ParamOffset, std::move(ParamDescriptors),
                        /*HasThisPointer=*/false, /*HasRVO=*/false,
                        /*IsLambdaStaticInvoker=*/false);

  assert(Func);
  Func->setDefined(true);
  // We don't compile the BlockDecl code at all right now.
  Func->setIsFullyCompiled(true);

  return Func;
}

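// Illustrative only: given
//   struct A { int a; };
//   struct B { int b; };
//   struct C : A, B {};
// collectBaseOffset(B, C) returns the offset of the B subobject within the
// interpreter's layout of C, accumulated over each step of the derivation
// chain.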
unsigned Context::collectBaseOffset(const RecordDecl *BaseDecl,
                                    const RecordDecl *DerivedDecl) const {
  assert(BaseDecl);
  assert(DerivedDecl);
  const auto *FinalDecl = cast<CXXRecordDecl>(BaseDecl);
  const RecordDecl *CurDecl = DerivedDecl;
  const Record *CurRecord = P->getOrCreateRecord(CurDecl);
  assert(CurDecl && FinalDecl);

  unsigned OffsetSum = 0;
  for (;;) {
    assert(CurRecord->getNumBases() > 0);
    // One level up
    for (const Record::Base &B : CurRecord->bases()) {
      const auto *BaseDecl = cast<CXXRecordDecl>(B.Decl);

      if (BaseDecl == FinalDecl || BaseDecl->isDerivedFrom(FinalDecl)) {
        OffsetSum += B.Offset;
        CurRecord = B.R;
        CurDecl = BaseDecl;
        break;
      }
    }
    if (CurDecl == FinalDecl)
      break;
  }

  assert(OffsetSum > 0);
  return OffsetSum;
}

const Record *Context::getRecord(const RecordDecl *D) const {
  return P->getOrCreateRecord(D);
}

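// For example, __builtin_classify_type("abc") only needs the type of its
// argument, so the argument expression itself must not be evaluated.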
bool Context::isUnevaluatedBuiltin(unsigned ID) {
  return ID == Builtin::BI__builtin_classify_type ||
         ID == Builtin::BI__builtin_os_log_format_buffer_size ||
         ID == Builtin::BI__builtin_constant_p || ID == Builtin::BI__noop;
}