//===- Interpreter.cpp - Interpreter Loop for llubi -----------------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This file implements the evaluation loop for each kind of instruction.
//
//===----------------------------------------------------------------------===//
12
13#include "Context.h"
14#include "Value.h"
15#include "llvm/IR/GetElementPtrTypeIterator.h"
16#include "llvm/IR/InlineAsm.h"
17#include "llvm/IR/InstVisitor.h"
18#include "llvm/IR/Operator.h"
19#include "llvm/IR/PatternMatch.h"
20#include "llvm/Support/Allocator.h"
21
22namespace llvm::ubi {
23
24using namespace PatternMatch;
25
/// Execution state of a single call frame.
enum class FrameState {
  // The frame has been created but the function body has not started yet.
  // Valid transition:
  // -> Running
  Entry,
  // Instructions of the function are being executed.
  // Valid transitions:
  // -> Pending (when a call instruction is reached)
  // -> Exit (when a return instruction is executed)
  Running,
  // A callee is in flight; the frame waits to enter it or to consume its
  // return value.
  // Valid transitions:
  // -> Running (after returning from the callee)
  Pending,
  // The function has finished; control goes back to the caller.
  Exit,
};
43
44/// Context for a function call.
45/// This struct maintains the state during the execution of a function,
46/// including the control flow, values of executed instructions, and stack
47/// objects.
48struct Frame {
49 Function &Func;
50 Frame *LastFrame;
51 CallBase *CallSite;
52 ArrayRef<AnyValue> Args;
53 AnyValue &RetVal;
54
55 TargetLibraryInfo TLI;
56 BasicBlock *BB;
57 BasicBlock::iterator PC;
58 FrameState State = FrameState::Entry;
59 // Stack objects allocated in this frame. They will be automatically freed
60 // when the function returns.
61 SmallVector<IntrusiveRefCntPtr<MemoryObject>> Allocas;
62 // Values of arguments and executed instructions in this function.
63 DenseMap<Value *, AnyValue> ValueMap;
64
65 // Reserved for in-flight subroutines.
66 Function *ResolvedCallee = nullptr;
67 SmallVector<AnyValue> CalleeArgs;
68 AnyValue CalleeRetVal;
69
70 Frame(Function &F, CallBase *CallSite, Frame *LastFrame,
71 ArrayRef<AnyValue> Args, AnyValue &RetVal,
72 const TargetLibraryInfoImpl &TLIImpl)
73 : Func(F), LastFrame(LastFrame), CallSite(CallSite), Args(Args),
74 RetVal(RetVal), TLI(TLIImpl, &F) {
75 assert((Args.size() == F.arg_size() ||
76 (F.isVarArg() && Args.size() >= F.arg_size())) &&
77 "Expected enough arguments to call the function.");
78 BB = &Func.getEntryBlock();
79 PC = BB->begin();
80 for (Argument &Arg : F.args())
81 ValueMap[&Arg] = Args[Arg.getArgNo()];
82 }
83};
84
85static AnyValue addNoWrap(const APInt &LHS, const APInt &RHS, bool HasNSW,
86 bool HasNUW) {
87 APInt Res = LHS + RHS;
88 if (HasNUW && Res.ult(RHS))
89 return AnyValue::poison();
90 if (HasNSW && LHS.isNonNegative() == RHS.isNonNegative() &&
91 LHS.isNonNegative() != Res.isNonNegative())
92 return AnyValue::poison();
93 return Res;
94}
95
96static AnyValue subNoWrap(const APInt &LHS, const APInt &RHS, bool HasNSW,
97 bool HasNUW) {
98 APInt Res = LHS - RHS;
99 if (HasNUW && Res.ugt(RHS: LHS))
100 return AnyValue::poison();
101 if (HasNSW && LHS.isNonNegative() != RHS.isNonNegative() &&
102 LHS.isNonNegative() != Res.isNonNegative())
103 return AnyValue::poison();
104 return Res;
105}
106
107static AnyValue mulNoWrap(const APInt &LHS, const APInt &RHS, bool HasNSW,
108 bool HasNUW) {
109 bool Overflow = false;
110 APInt Res = LHS.smul_ov(RHS, Overflow);
111 if (HasNSW && Overflow)
112 return AnyValue::poison();
113 if (HasNUW) {
114 (void)LHS.umul_ov(RHS, Overflow);
115 if (Overflow)
116 return AnyValue::poison();
117 }
118 return Res;
119}
120
121/// Instruction executor using the visitor pattern.
122/// Unlike the Context class that manages the global state,
123/// InstExecutor only maintains the state for call frames.
124class InstExecutor : public InstVisitor<InstExecutor, void> {
125 Context &Ctx;
126 const DataLayout &DL;
127 EventHandler &Handler;
128 std::list<Frame> CallStack;
129 // Used to indicate whether the interpreter should continue execution.
130 bool Status;
131 Frame *CurrentFrame = nullptr;
132 AnyValue None;
133
134 void reportImmediateUB(StringRef Msg) {
135 // Check if we have already reported an immediate UB.
136 if (!Status)
137 return;
138 Status = false;
139 // TODO: Provide stack trace information.
140 Handler.onImmediateUB(Msg);
141 }
142
143 void reportError(StringRef Msg) {
144 // Check if we have already reported an error message.
145 if (!Status)
146 return;
147 Status = false;
148 Handler.onError(Msg);
149 }
150
151 const AnyValue &getValue(Value *V) {
152 if (auto *C = dyn_cast<Constant>(Val: V))
153 return Ctx.getConstantValue(C);
154 return CurrentFrame->ValueMap.at(Val: V);
155 }
156
157 void setResult(Instruction &I, AnyValue V) {
158 if (Status)
159 Status &= Handler.onInstructionExecuted(I, Result: V);
160 CurrentFrame->ValueMap.insert_or_assign(Key: &I, Val: std::move(V));
161 }
162
163 AnyValue computeUnOp(Type *Ty, const AnyValue &Operand,
164 function_ref<AnyValue(const AnyValue &)> ScalarFn) {
165 if (Ty->isVectorTy()) {
166 auto &OperandVec = Operand.asAggregate();
167 std::vector<AnyValue> ResVec;
168 ResVec.reserve(n: OperandVec.size());
169 for (const auto &Scalar : OperandVec)
170 ResVec.push_back(x: ScalarFn(Scalar));
171 return std::move(ResVec);
172 }
173 return ScalarFn(Operand);
174 }
175
176 void visitUnOp(Instruction &I,
177 function_ref<AnyValue(const AnyValue &)> ScalarFn) {
178 setResult(I, V: computeUnOp(Ty: I.getType(), Operand: getValue(V: I.getOperand(i: 0)), ScalarFn));
179 }
180
181 void visitIntUnOp(Instruction &I,
182 function_ref<AnyValue(const APInt &)> ScalarFn) {
183 visitUnOp(I, ScalarFn: [&](const AnyValue &Operand) -> AnyValue {
184 if (Operand.isPoison())
185 return AnyValue::poison();
186 return ScalarFn(Operand.asInteger());
187 });
188 }
189
190 AnyValue computeBinOp(
191 Type *Ty, const AnyValue &LHS, const AnyValue &RHS,
192 function_ref<AnyValue(const AnyValue &, const AnyValue &)> ScalarFn) {
193 if (Ty->isVectorTy()) {
194 auto &LHSVec = LHS.asAggregate();
195 auto &RHSVec = RHS.asAggregate();
196 std::vector<AnyValue> ResVec;
197 ResVec.reserve(n: LHSVec.size());
198 for (const auto &[ScalarLHS, ScalarRHS] : zip(t: LHSVec, u: RHSVec))
199 ResVec.push_back(x: ScalarFn(ScalarLHS, ScalarRHS));
200 return std::move(ResVec);
201 }
202 return ScalarFn(LHS, RHS);
203 }
204
205 void visitBinOp(
206 Instruction &I,
207 function_ref<AnyValue(const AnyValue &, const AnyValue &)> ScalarFn) {
208 setResult(I, V: computeBinOp(Ty: I.getType(), LHS: getValue(V: I.getOperand(i: 0)),
209 RHS: getValue(V: I.getOperand(i: 1)), ScalarFn));
210 }
211
212 void
213 visitIntBinOp(Instruction &I,
214 function_ref<AnyValue(const APInt &, const APInt &)> ScalarFn) {
215 visitBinOp(I, ScalarFn: [&](const AnyValue &LHS, const AnyValue &RHS) -> AnyValue {
216 if (LHS.isPoison() || RHS.isPoison())
217 return AnyValue::poison();
218 return ScalarFn(LHS.asInteger(), RHS.asInteger());
219 });
220 }
221
222 void jumpTo(Instruction &Terminator, BasicBlock *DestBB) {
223 if (!Handler.onBBJump(I&: Terminator, To&: *DestBB)) {
224 Status = false;
225 return;
226 }
227 BasicBlock *From = CurrentFrame->BB;
228 CurrentFrame->BB = DestBB;
229 CurrentFrame->PC = DestBB->begin();
230 // Update PHI nodes in batch to avoid the interference between PHI nodes.
231 // We need to store the incoming values into a temporary buffer.
232 // Otherwise, the incoming value may be overwritten before it is
233 // used by other PHI nodes.
234 SmallVector<std::pair<PHINode *, AnyValue>> IncomingValues;
235 PHINode *PHI = nullptr;
236 while ((PHI = dyn_cast<PHINode>(Val&: CurrentFrame->PC))) {
237 Value *Incoming = PHI->getIncomingValueForBlock(BB: From);
238 // TODO: handle fast-math flags.
239 IncomingValues.emplace_back(Args&: PHI, Args: getValue(V: Incoming));
240 ++CurrentFrame->PC;
241 }
242 for (auto &[K, V] : IncomingValues)
243 setResult(I&: *K, V: std::move(V));
244 }
245
246 /// Helper function to determine whether an inline asm is a no-op, which is
247 /// used to implement black_box style optimization blockers.
248 bool isNoopInlineAsm(Value *V, Type *RetTy) {
249 if (auto *Asm = dyn_cast<InlineAsm>(Val: V))
250 return Asm->getAsmString().empty() && RetTy->isVoidTy();
251 return false;
252 }
253
254 /// Check if the upcoming memory access is valid. Returns the offset relative
255 /// to the underlying object if it is valid.
256 std::optional<uint64_t> verifyMemAccess(const MemoryObject &MO,
257 const APInt &Address,
258 uint64_t AccessSize, Align Alignment,
259 bool IsStore) {
260 // Loading from a stack object outside its lifetime is not undefined
261 // behavior and returns a poison value instead. Storing to it is still
262 // undefined behavior.
263 if (IsStore ? MO.getState() != MemoryObjectState::Alive
264 : MO.getState() == MemoryObjectState::Freed) {
265 reportImmediateUB(Msg: "Try to access a dead memory object.");
266 return std::nullopt;
267 }
268
269 if (Address.countr_zero() < Log2(A: Alignment)) {
270 reportImmediateUB(Msg: "Misaligned memory access.");
271 return std::nullopt;
272 }
273
274 if (AccessSize > MO.getSize() || Address.ult(RHS: MO.getAddress())) {
275 reportImmediateUB(Msg: "Memory access is out of bounds.");
276 return std::nullopt;
277 }
278
279 APInt Offset = Address - MO.getAddress();
280
281 if (Offset.ugt(RHS: MO.getSize() - AccessSize)) {
282 reportImmediateUB(Msg: "Memory access is out of bounds.");
283 return std::nullopt;
284 }
285
286 return Offset.getZExtValue();
287 }
288
289 AnyValue load(const AnyValue &Ptr, Align Alignment, Type *ValTy) {
290 if (Ptr.isPoison()) {
291 reportImmediateUB(Msg: "Invalid memory access with a poison pointer.");
292 return AnyValue::getPoisonValue(Ctx, Ty: ValTy);
293 }
294 auto &PtrVal = Ptr.asPointer();
295 auto *MO = PtrVal.getMemoryObject();
296 if (!MO) {
297 reportImmediateUB(
298 Msg: "Invalid memory access via a pointer with nullary provenance.");
299 return AnyValue::getPoisonValue(Ctx, Ty: ValTy);
300 }
301 // TODO: pointer capability check
302 if (auto Offset =
303 verifyMemAccess(MO: *MO, Address: PtrVal.address(),
304 AccessSize: Ctx.getEffectiveTypeStoreSize(Ty: ValTy), Alignment,
305 /*IsStore=*/false)) {
306 // Load from a dead stack object yields poison value.
307 if (MO->getState() == MemoryObjectState::Dead)
308 return AnyValue::getPoisonValue(Ctx, Ty: ValTy);
309
310 return Ctx.load(MO&: *MO, Offset: *Offset, ValTy);
311 }
312 return AnyValue::getPoisonValue(Ctx, Ty: ValTy);
313 }
314
315 void store(const AnyValue &Ptr, Align Alignment, const AnyValue &Val,
316 Type *ValTy) {
317 if (Ptr.isPoison()) {
318 reportImmediateUB(Msg: "Invalid memory access with a poison pointer.");
319 return;
320 }
321 auto &PtrVal = Ptr.asPointer();
322 auto *MO = PtrVal.getMemoryObject();
323 if (!MO) {
324 reportImmediateUB(
325 Msg: "Invalid memory access via a pointer with nullary provenance.");
326 return;
327 }
328 // TODO: pointer capability check
329 if (auto Offset =
330 verifyMemAccess(MO: *MO, Address: PtrVal.address(),
331 AccessSize: Ctx.getEffectiveTypeStoreSize(Ty: ValTy), Alignment,
332 /*IsStore=*/true))
333 Ctx.store(MO&: *MO, Offset: *Offset, Val, ValTy);
334 }
335
336 AnyValue computePtrAdd(const Pointer &Ptr, const APInt &Offset,
337 GEPNoWrapFlags Flags, AnyValue &AccumulatedOffset) {
338 if (Offset.isZero())
339 return Ptr;
340 APInt IndexBits = Ptr.address().trunc(width: Offset.getBitWidth());
341 auto NewIndex = addNoWrap(LHS: IndexBits, RHS: Offset, /*HasNSW=*/false,
342 HasNUW: Flags.hasNoUnsignedWrap());
343 if (NewIndex.isPoison())
344 return AnyValue::poison();
345 if (Flags.hasNoUnsignedSignedWrap()) {
346 // The successive addition of the current address, truncated to the
347 // pointer index type and interpreted as an unsigned number, and each
348 // offset, interpreted as a signed number, does not wrap the pointer index
349 // type.
350 if (Offset.isNonNegative() ? NewIndex.asInteger().ult(RHS: IndexBits)
351 : NewIndex.asInteger().ugt(RHS: IndexBits))
352 return AnyValue::poison();
353 }
354 APInt NewAddr = Ptr.address();
355 NewAddr.insertBits(SubBits: NewIndex.asInteger(), bitPosition: 0);
356
357 auto *MO = Ptr.getMemoryObject();
358 if (Flags.isInBounds() && (!MO || !MO->inBounds(NewAddr)))
359 return AnyValue::poison();
360
361 if (!AccumulatedOffset.isPoison()) {
362 AccumulatedOffset =
363 addNoWrap(LHS: AccumulatedOffset.asInteger(), RHS: Offset,
364 HasNSW: Flags.hasNoUnsignedSignedWrap(), HasNUW: Flags.hasNoUnsignedWrap());
365 if (AccumulatedOffset.isPoison())
366 return AnyValue::poison();
367 }
368
369 // Should not expose provenance here even if the new address doesn't point
370 // to the original object.
371 return Ptr.getWithNewAddr(NewAddr);
372 }
373
374 AnyValue computePtrAdd(const AnyValue &Ptr, const APInt &Offset,
375 GEPNoWrapFlags Flags, AnyValue &AccumulatedOffset) {
376 if (Ptr.isPoison())
377 return AnyValue::poison();
378 return computePtrAdd(Ptr: Ptr.asPointer(), Offset, Flags, AccumulatedOffset);
379 }
380
381 AnyValue computeScaledPtrAdd(const AnyValue &Ptr, const AnyValue &Index,
382 const APInt &Scale, GEPNoWrapFlags Flags,
383 AnyValue &AccumulatedOffset) {
384 if (Ptr.isPoison() || Index.isPoison())
385 return AnyValue::poison();
386 assert(Ptr.isPointer() && Index.isInteger() && "Unexpected type.");
387 if (Scale.isOne())
388 return computePtrAdd(Ptr, Offset: Index.asInteger(), Flags, AccumulatedOffset);
389 auto ScaledOffset =
390 mulNoWrap(LHS: Index.asInteger(), RHS: Scale, HasNSW: Flags.hasNoUnsignedSignedWrap(),
391 HasNUW: Flags.hasNoUnsignedWrap());
392 if (ScaledOffset.isPoison())
393 return AnyValue::poison();
394 return computePtrAdd(Ptr, Offset: ScaledOffset.asInteger(), Flags,
395 AccumulatedOffset);
396 }
397
398 AnyValue canonicalizeIndex(const AnyValue &Idx, unsigned IndexBitWidth,
399 GEPNoWrapFlags Flags) {
400 if (Idx.isPoison())
401 return AnyValue::poison();
402 auto &IdxInt = Idx.asInteger();
403 if (IdxInt.getBitWidth() == IndexBitWidth)
404 return Idx;
405 if (IdxInt.getBitWidth() > IndexBitWidth) {
406 if (Flags.hasNoUnsignedSignedWrap() &&
407 !IdxInt.isSignedIntN(N: IndexBitWidth))
408 return AnyValue::poison();
409
410 if (Flags.hasNoUnsignedWrap() && !IdxInt.isIntN(N: IndexBitWidth))
411 return AnyValue::poison();
412
413 return IdxInt.trunc(width: IndexBitWidth);
414 }
415 return IdxInt.sext(width: IndexBitWidth);
416 }
417
418public:
419 InstExecutor(Context &C, EventHandler &H, Function &F,
420 ArrayRef<AnyValue> Args, AnyValue &RetVal)
421 : Ctx(C), DL(Ctx.getDataLayout()), Handler(H), Status(true) {
422 CallStack.emplace_back(args&: F, /*CallSite=*/args: nullptr, /*LastFrame=*/args: nullptr, args&: Args,
423 args&: RetVal, args: Ctx.getTLIImpl());
424 }
425
426 void visitReturnInst(ReturnInst &RI) {
427 if (auto *RV = RI.getReturnValue())
428 CurrentFrame->RetVal = getValue(V: RV);
429 CurrentFrame->State = FrameState::Exit;
430 Status &= Handler.onInstructionExecuted(I&: RI, Result: None);
431 }
432
433 void visitBranchInst(BranchInst &BI) {
434 if (BI.isConditional()) {
435 switch (getValue(V: BI.getCondition()).asBoolean()) {
436 case BooleanKind::True:
437 jumpTo(Terminator&: BI, DestBB: BI.getSuccessor(Idx: 0));
438 return;
439 case BooleanKind::False:
440 jumpTo(Terminator&: BI, DestBB: BI.getSuccessor(Idx: 1));
441 return;
442 case BooleanKind::Poison:
443 reportImmediateUB(Msg: "Branch on poison condition.");
444 return;
445 }
446 }
447 jumpTo(Terminator&: BI, DestBB: BI.getSuccessor(Idx: 0));
448 }
449
450 void visitSwitchInst(SwitchInst &SI) {
451 auto &Cond = getValue(V: SI.getCondition());
452 if (Cond.isPoison()) {
453 reportImmediateUB(Msg: "Switch on poison condition.");
454 return;
455 }
456 for (auto &Case : SI.cases()) {
457 if (Case.getCaseValue()->getValue() == Cond.asInteger()) {
458 jumpTo(Terminator&: SI, DestBB: Case.getCaseSuccessor());
459 return;
460 }
461 }
462 jumpTo(Terminator&: SI, DestBB: SI.getDefaultDest());
463 }
464
465 void visitUnreachableInst(UnreachableInst &) {
466 reportImmediateUB(Msg: "Unreachable code.");
467 }
468
469 void visitCallBrInst(CallBrInst &CI) {
470 if (isNoopInlineAsm(V: CI.getCalledOperand(), RetTy: CI.getType())) {
471 jumpTo(Terminator&: CI, DestBB: CI.getDefaultDest());
472 return;
473 }
474
475 Handler.onUnrecognizedInstruction(I&: CI);
476 Status = false;
477 }
478
479 void visitIndirectBrInst(IndirectBrInst &IBI) {
480 auto &Target = getValue(V: IBI.getAddress());
481 if (Target.isPoison()) {
482 reportImmediateUB(Msg: "Indirect branch on poison.");
483 return;
484 }
485 if (BasicBlock *DestBB = Ctx.getTargetBlock(Ptr: Target.asPointer())) {
486 if (any_of(Range: IBI.successors(),
487 P: [DestBB](BasicBlock *Succ) { return Succ == DestBB; }))
488 jumpTo(Terminator&: IBI, DestBB);
489 else
490 reportImmediateUB(Msg: "Indirect branch on unlisted target BB.");
491
492 return;
493 }
494 reportImmediateUB(Msg: "Indirect branch on invalid target BB.");
495 }
496
497 void returnFromCallee() {
498 // TODO: handle retval attributes (Attributes from known callee should be
499 // applied if available).
500 // TODO: handle metadata
501 auto &CB = cast<CallBase>(Val&: *CurrentFrame->PC);
502 CurrentFrame->CalleeArgs.clear();
503 AnyValue &RetVal = CurrentFrame->CalleeRetVal;
504 setResult(I&: CB, V: std::move(RetVal));
505
506 if (auto *II = dyn_cast<InvokeInst>(Val: &CB))
507 jumpTo(Terminator&: *II, DestBB: II->getNormalDest());
508 else if (CurrentFrame->State == FrameState::Pending)
509 ++CurrentFrame->PC;
510 }
511
512 AnyValue callIntrinsic(CallBase &CB) {
513 Intrinsic::ID IID = CB.getIntrinsicID();
514 switch (IID) {
515 case Intrinsic::assume:
516 switch (getValue(V: CB.getArgOperand(i: 0)).asBoolean()) {
517 case BooleanKind::True:
518 break;
519 case BooleanKind::False:
520 case BooleanKind::Poison:
521 reportImmediateUB(Msg: "Assume on false or poison condition.");
522 break;
523 }
524 // TODO: handle llvm.assume with operand bundles
525 return AnyValue();
526 case Intrinsic::lifetime_start:
527 case Intrinsic::lifetime_end: {
528 auto *Ptr = CB.getArgOperand(i: 0);
529 if (isa<PoisonValue>(Val: Ptr))
530 return AnyValue();
531 auto *MO = getValue(V: Ptr).asPointer().getMemoryObject();
532 assert(MO && "Memory object accessed by lifetime intrinsic should be "
533 "always valid.");
534 if (IID == Intrinsic::lifetime_start) {
535 MO->setState(MemoryObjectState::Alive);
536 fill(Range: MO->getBytes(), Value: Byte::undef());
537 } else {
538 MO->setState(MemoryObjectState::Dead);
539 }
540 return AnyValue();
541 }
542 default:
543 Handler.onUnrecognizedInstruction(I&: CB);
544 Status = false;
545 return AnyValue();
546 }
547 }
548
549 AnyValue callLibFunc(CallBase &CB, Function *ResolvedCallee) {
550 LibFunc LF;
551 // Respect nobuiltin attributes on call site.
552 if (CB.isNoBuiltin() ||
553 !CurrentFrame->TLI.getLibFunc(FDecl: *ResolvedCallee, F&: LF)) {
554 Handler.onUnrecognizedInstruction(I&: CB);
555 Status = false;
556 return AnyValue();
557 }
558
559 Handler.onUnrecognizedInstruction(I&: CB);
560 Status = false;
561 return AnyValue();
562 }
563
564 void enterCall(CallBase &CB) {
565 Function *Callee = CB.getCalledFunction();
566 // TODO: handle parameter attributes (Attributes from known callee should be
567 // applied if available).
568 // TODO: handle byval/initializes
569 auto &CalleeArgs = CurrentFrame->CalleeArgs;
570 assert(CalleeArgs.empty() &&
571 "Forgot to call returnFromCallee before entering a new call.");
572 for (Value *Arg : CB.args())
573 CalleeArgs.push_back(Elt: getValue(V: Arg));
574
575 if (!Callee) {
576 Value *CalledOperand = CB.getCalledOperand();
577 if (isNoopInlineAsm(V: CalledOperand, RetTy: CB.getType())) {
578 CurrentFrame->ResolvedCallee = nullptr;
579 returnFromCallee();
580 return;
581 }
582
583 if (isa<InlineAsm>(Val: CalledOperand)) {
584 Handler.onUnrecognizedInstruction(I&: CB);
585 Status = false;
586 return;
587 }
588
589 auto &CalleeVal = getValue(V: CalledOperand);
590 if (CalleeVal.isPoison()) {
591 reportImmediateUB(Msg: "Indirect call through poison function pointer.");
592 return;
593 }
594 Callee = Ctx.getTargetFunction(Ptr: CalleeVal.asPointer());
595 if (!Callee) {
596 reportImmediateUB(Msg: "Indirect call through invalid function pointer.");
597 return;
598 }
599 if (Callee->getFunctionType() != CB.getFunctionType()) {
600 reportImmediateUB(Msg: "Indirect call through a function pointer with "
601 "mismatched signature.");
602 return;
603 }
604 }
605
606 assert(Callee && "Expected a resolved callee function.");
607 assert(
608 Callee->getFunctionType() == CB.getFunctionType() &&
609 "Expected the callee function type to match the call site signature.");
610 CurrentFrame->ResolvedCallee = Callee;
611 if (Callee->isIntrinsic()) {
612 CurrentFrame->CalleeRetVal = callIntrinsic(CB);
613 returnFromCallee();
614 return;
615 } else if (Callee->isDeclaration()) {
616 CurrentFrame->CalleeRetVal = callLibFunc(CB, ResolvedCallee: Callee);
617 returnFromCallee();
618 return;
619 } else {
620 uint32_t MaxStackDepth = Ctx.getMaxStackDepth();
621 if (MaxStackDepth && CallStack.size() >= MaxStackDepth) {
622 reportError(Msg: "Maximum stack depth exceeded.");
623 return;
624 }
625 assert(!Callee->empty() && "Expected a defined function.");
626 // Suspend the current frame and push the callee frame onto the stack.
627 ArrayRef<AnyValue> Args = CurrentFrame->CalleeArgs;
628 AnyValue &RetVal = CurrentFrame->CalleeRetVal;
629 CurrentFrame->State = FrameState::Pending;
630 CallStack.emplace_back(args&: *Callee, args: &CB, args&: CurrentFrame, args&: Args, args&: RetVal,
631 args: Ctx.getTLIImpl());
632 }
633 }
634
635 void visitCallInst(CallInst &CI) { enterCall(CB&: CI); }
636
637 void visitInvokeInst(InvokeInst &II) {
638 // TODO: handle exceptions
639 enterCall(CB&: II);
640 }
641
642 void visitAdd(BinaryOperator &I) {
643 visitIntBinOp(I, ScalarFn: [&](const APInt &LHS, const APInt &RHS) {
644 return addNoWrap(LHS, RHS, HasNSW: I.hasNoSignedWrap(), HasNUW: I.hasNoUnsignedWrap());
645 });
646 }
647
648 void visitSub(BinaryOperator &I) {
649 visitIntBinOp(I, ScalarFn: [&](const APInt &LHS, const APInt &RHS) {
650 return subNoWrap(LHS, RHS, HasNSW: I.hasNoSignedWrap(), HasNUW: I.hasNoUnsignedWrap());
651 });
652 }
653
654 void visitMul(BinaryOperator &I) {
655 visitIntBinOp(I, ScalarFn: [&](const APInt &LHS, const APInt &RHS) {
656 return mulNoWrap(LHS, RHS, HasNSW: I.hasNoSignedWrap(), HasNUW: I.hasNoUnsignedWrap());
657 });
658 }
659
660 void visitSDiv(BinaryOperator &I) {
661 visitBinOp(I, ScalarFn: [&](const AnyValue &LHS, const AnyValue &RHS) -> AnyValue {
662 // Priority: Immediate UB > poison > normal value
663 if (RHS.isPoison()) {
664 reportImmediateUB(Msg: "Division by zero (refine RHS to 0).");
665 return AnyValue::poison();
666 }
667 const APInt &RHSVal = RHS.asInteger();
668 if (RHSVal.isZero()) {
669 reportImmediateUB(Msg: "Division by zero.");
670 return AnyValue::poison();
671 }
672 if (LHS.isPoison()) {
673 if (RHSVal.isAllOnes())
674 reportImmediateUB(
675 Msg: "Signed division overflow (refine LHS to INT_MIN).");
676 return AnyValue::poison();
677 }
678 const APInt &LHSVal = LHS.asInteger();
679 if (LHSVal.isMinSignedValue() && RHSVal.isAllOnes()) {
680 reportImmediateUB(Msg: "Signed division overflow.");
681 return AnyValue::poison();
682 }
683
684 if (I.isExact()) {
685 APInt Q, R;
686 APInt::sdivrem(LHS: LHSVal, RHS: RHSVal, Quotient&: Q, Remainder&: R);
687 if (!R.isZero())
688 return AnyValue::poison();
689 return Q;
690 } else {
691 return LHSVal.sdiv(RHS: RHSVal);
692 }
693 });
694 }
695
696 void visitSRem(BinaryOperator &I) {
697 visitBinOp(I, ScalarFn: [&](const AnyValue &LHS, const AnyValue &RHS) -> AnyValue {
698 // Priority: Immediate UB > poison > normal value
699 if (RHS.isPoison()) {
700 reportImmediateUB(Msg: "Division by zero (refine RHS to 0).");
701 return AnyValue::poison();
702 }
703 const APInt &RHSVal = RHS.asInteger();
704 if (RHSVal.isZero()) {
705 reportImmediateUB(Msg: "Division by zero.");
706 return AnyValue::poison();
707 }
708 if (LHS.isPoison()) {
709 if (RHSVal.isAllOnes())
710 reportImmediateUB(
711 Msg: "Signed division overflow (refine LHS to INT_MIN).");
712 return AnyValue::poison();
713 }
714 const APInt &LHSVal = LHS.asInteger();
715 if (LHSVal.isMinSignedValue() && RHSVal.isAllOnes()) {
716 reportImmediateUB(Msg: "Signed division overflow.");
717 return AnyValue::poison();
718 }
719
720 return LHSVal.srem(RHS: RHSVal);
721 });
722 }
723
724 void visitUDiv(BinaryOperator &I) {
725 visitBinOp(I, ScalarFn: [&](const AnyValue &LHS, const AnyValue &RHS) -> AnyValue {
726 // Priority: Immediate UB > poison > normal value
727 if (RHS.isPoison()) {
728 reportImmediateUB(Msg: "Division by zero (refine RHS to 0).");
729 return AnyValue::poison();
730 }
731 const APInt &RHSVal = RHS.asInteger();
732 if (RHSVal.isZero()) {
733 reportImmediateUB(Msg: "Division by zero.");
734 return AnyValue::poison();
735 }
736 if (LHS.isPoison())
737 return AnyValue::poison();
738 const APInt &LHSVal = LHS.asInteger();
739
740 if (I.isExact()) {
741 APInt Q, R;
742 APInt::udivrem(LHS: LHSVal, RHS: RHSVal, Quotient&: Q, Remainder&: R);
743 if (!R.isZero())
744 return AnyValue::poison();
745 return Q;
746 } else {
747 return LHSVal.udiv(RHS: RHSVal);
748 }
749 });
750 }
751
752 void visitURem(BinaryOperator &I) {
753 visitBinOp(I, ScalarFn: [&](const AnyValue &LHS, const AnyValue &RHS) -> AnyValue {
754 // Priority: Immediate UB > poison > normal value
755 if (RHS.isPoison()) {
756 reportImmediateUB(Msg: "Division by zero (refine RHS to 0).");
757 return AnyValue::poison();
758 }
759 const APInt &RHSVal = RHS.asInteger();
760 if (RHSVal.isZero()) {
761 reportImmediateUB(Msg: "Division by zero.");
762 return AnyValue::poison();
763 }
764 if (LHS.isPoison())
765 return AnyValue::poison();
766 const APInt &LHSVal = LHS.asInteger();
767 return LHSVal.urem(RHS: RHSVal);
768 });
769 }
770
771 void visitTruncInst(TruncInst &Trunc) {
772 visitIntUnOp(I&: Trunc, ScalarFn: [&](const APInt &Operand) -> AnyValue {
773 unsigned DestBW = Trunc.getType()->getScalarSizeInBits();
774 if (Trunc.hasNoSignedWrap() && Operand.getSignificantBits() > DestBW)
775 return AnyValue::poison();
776 if (Trunc.hasNoUnsignedWrap() && Operand.getActiveBits() > DestBW)
777 return AnyValue::poison();
778 return Operand.trunc(width: DestBW);
779 });
780 }
781
782 void visitZExtInst(ZExtInst &ZExt) {
783 visitIntUnOp(I&: ZExt, ScalarFn: [&](const APInt &Operand) -> AnyValue {
784 uint32_t DestBW = ZExt.getDestTy()->getScalarSizeInBits();
785 if (ZExt.hasNonNeg() && Operand.isNegative())
786 return AnyValue::poison();
787 return Operand.zext(width: DestBW);
788 });
789 }
790
791 void visitSExtInst(SExtInst &SExt) {
792 visitIntUnOp(I&: SExt, ScalarFn: [&](const APInt &Operand) -> AnyValue {
793 uint32_t DestBW = SExt.getDestTy()->getScalarSizeInBits();
794 return Operand.sext(width: DestBW);
795 });
796 }
797
798 void visitAnd(BinaryOperator &I) {
799 visitIntBinOp(I, ScalarFn: [](const APInt &LHS, const APInt &RHS) -> AnyValue {
800 return LHS & RHS;
801 });
802 }
803
804 void visitXor(BinaryOperator &I) {
805 visitIntBinOp(I, ScalarFn: [](const APInt &LHS, const APInt &RHS) -> AnyValue {
806 return LHS ^ RHS;
807 });
808 }
809
810 void visitOr(BinaryOperator &I) {
811 visitIntBinOp(I, ScalarFn: [&](const APInt &LHS, const APInt &RHS) -> AnyValue {
812 if (cast<PossiblyDisjointInst>(Val&: I).isDisjoint() && LHS.intersects(RHS))
813 return AnyValue::poison();
814 return LHS | RHS;
815 });
816 }
817
818 void visitShl(BinaryOperator &I) {
819 visitIntBinOp(I, ScalarFn: [&](const APInt &LHS, const APInt &RHS) -> AnyValue {
820 if (RHS.uge(RHS: LHS.getBitWidth()))
821 return AnyValue::poison();
822 if (I.hasNoSignedWrap() && RHS.uge(RHS: LHS.getNumSignBits()))
823 return AnyValue::poison();
824 if (I.hasNoUnsignedWrap() && RHS.ugt(RHS: LHS.countl_zero()))
825 return AnyValue::poison();
826 return LHS.shl(ShiftAmt: RHS);
827 });
828 }
829
830 void visitLShr(BinaryOperator &I) {
831 visitIntBinOp(I, ScalarFn: [&](const APInt &LHS, const APInt &RHS) -> AnyValue {
832 if (RHS.uge(RHS: cast<PossiblyExactOperator>(Val&: I).isExact()
833 ? LHS.countr_zero() + 1
834 : LHS.getBitWidth()))
835 return AnyValue::poison();
836 return LHS.lshr(ShiftAmt: RHS);
837 });
838 }
839
840 void visitAShr(BinaryOperator &I) {
841 visitIntBinOp(I, ScalarFn: [&](const APInt &LHS, const APInt &RHS) -> AnyValue {
842 if (RHS.uge(RHS: cast<PossiblyExactOperator>(Val&: I).isExact()
843 ? LHS.countr_zero() + 1
844 : LHS.getBitWidth()))
845 return AnyValue::poison();
846 return LHS.ashr(ShiftAmt: RHS);
847 });
848 }
849
850 void visitICmpInst(ICmpInst &I) {
851 visitBinOp(I, ScalarFn: [&](const AnyValue &LHS, const AnyValue &RHS) -> AnyValue {
852 if (LHS.isPoison() || RHS.isPoison())
853 return AnyValue::poison();
854 // TODO: handle pointer comparison.
855 const APInt &LHSVal = LHS.asInteger();
856 const APInt &RHSVal = RHS.asInteger();
857 if (I.hasSameSign() && LHSVal.isNonNegative() != RHSVal.isNonNegative())
858 return AnyValue::poison();
859 return AnyValue::boolean(
860 Val: ICmpInst::compare(LHS: LHSVal, RHS: RHSVal, Pred: I.getPredicate()));
861 });
862 }
863
864 void visitSelect(SelectInst &SI) {
865 // TODO: handle fast-math flags.
866 if (SI.getCondition()->getType()->isIntegerTy(Bitwidth: 1)) {
867 switch (getValue(V: SI.getCondition()).asBoolean()) {
868 case BooleanKind::True:
869 setResult(I&: SI, V: getValue(V: SI.getTrueValue()));
870 return;
871 case BooleanKind::False:
872 setResult(I&: SI, V: getValue(V: SI.getFalseValue()));
873 return;
874 case BooleanKind::Poison:
875 setResult(I&: SI, V: AnyValue::getPoisonValue(Ctx, Ty: SI.getType()));
876 return;
877 }
878 }
879
880 auto &Cond = getValue(V: SI.getCondition()).asAggregate();
881 auto &TV = getValue(V: SI.getTrueValue()).asAggregate();
882 auto &FV = getValue(V: SI.getFalseValue()).asAggregate();
883 std::vector<AnyValue> Res;
884 size_t Len = Cond.size();
885 Res.reserve(n: Len);
886 for (uint32_t I = 0; I != Len; ++I) {
887 switch (Cond[I].asBoolean()) {
888 case BooleanKind::True:
889 Res.push_back(x: TV[I]);
890 break;
891 case BooleanKind::False:
892 Res.push_back(x: FV[I]);
893 break;
894 case BooleanKind::Poison:
895 Res.push_back(x: AnyValue::poison());
896 break;
897 }
898 }
899 setResult(I&: SI, V: std::move(Res));
900 }
901
902 void visitAllocaInst(AllocaInst &AI) {
903 uint64_t AllocSize = Ctx.getEffectiveTypeAllocSize(Ty: AI.getAllocatedType());
904 if (AI.isArrayAllocation()) {
905 auto &Size = getValue(V: AI.getArraySize());
906 if (Size.isPoison()) {
907 reportImmediateUB(Msg: "Alloca with poison array size.");
908 return;
909 }
910 if (Size.asInteger().getActiveBits() > 64) {
911 reportImmediateUB(
912 Msg: "Alloca with large array size that overflows uint64_t.");
913 return;
914 }
915 bool Overflowed = false;
916 AllocSize = SaturatingMultiply(X: AllocSize, Y: Size.asInteger().getZExtValue(),
917 ResultOverflowed: &Overflowed);
918 if (Overflowed) {
919 reportImmediateUB(
920 Msg: "Alloca with allocation size that overflows uint64_t.");
921 return;
922 }
923 }
924 // If it is used by llvm.lifetime.start, it should be initially dead.
925 bool IsInitiallyDead = any_of(Range: AI.users(), P: [](User *U) {
926 return match(V: U, P: m_Intrinsic<Intrinsic::lifetime_start>());
927 });
928 auto Obj = Ctx.allocate(Size: AllocSize, Align: AI.getPointerAlignment(DL).value(),
929 Name: AI.getName(), AS: AI.getAddressSpace(),
930 InitKind: IsInitiallyDead ? MemInitKind::Poisoned
931 : MemInitKind::Uninitialized);
932 if (!Obj) {
933 reportError(Msg: "Insufficient stack space.");
934 return;
935 }
936 CurrentFrame->Allocas.push_back(Elt: Obj);
937 setResult(I&: AI, V: Ctx.deriveFromMemoryObject(Obj));
938 }
939
  /// Evaluate a getelementptr: walk the indexed types, adding each scaled
  /// index to the base pointer. Handles scalar and vector GEPs uniformly by
  /// promoting scalars to splatted vectors on demand.
  void visitGetElementPtrInst(GetElementPtrInst &GEP) {
    // Width of the pointer index type; all offset arithmetic is done in this
    // width (modulo 2^IndexBitWidth).
    uint32_t IndexBitWidth =
        DL.getIndexSizeInBits(AS: GEP.getType()->getPointerAddressSpace());
    GEPNoWrapFlags Flags = GEP.getNoWrapFlags();
    // Res holds the pointer(s) being advanced; it is an aggregate (one entry
    // per lane) as soon as the base or any index is a vector.
    AnyValue Res = getValue(V: GEP.getPointerOperand());
    // Running byte offset per lane, threaded through computeScaledPtrAdd --
    // presumably used for nowrap/inbounds checking; confirm in its definition.
    AnyValue AccumulatedOffset = APInt(IndexBitWidth, 0);
    if (Res.isAggregate())
      AccumulatedOffset =
          AnyValue::getVectorSplat(Scalar: AccumulatedOffset, NumElements: Res.asAggregate().size());
    // Adds Index * Scale to Res, keeping Res, Index, and AccumulatedOffset in
    // lockstep lane-wise (splatting scalars to vectors where needed).
    auto ApplyScaledOffset = [&](const AnyValue &Index, const APInt &Scale) {
      // A vector index with a scalar base promotes the base (and the running
      // offset) to a vector of the same lane count.
      if (Index.isAggregate() && !Res.isAggregate()) {
        Res = AnyValue::getVectorSplat(Scalar: Res, NumElements: Index.asAggregate().size());
        AccumulatedOffset = AnyValue::getVectorSplat(
            Scalar: AccumulatedOffset, NumElements: Index.asAggregate().size());
      }
      if (Index.isAggregate() && Res.isAggregate()) {
        // Vector index, vector base: advance each lane with its own index.
        for (auto &&[ResElem, IndexElem, OffsetElem] :
             zip(t&: Res.asAggregate(), u: Index.asAggregate(),
                 args&: AccumulatedOffset.asAggregate()))
          ResElem = computeScaledPtrAdd(
              Ptr: ResElem, Index: canonicalizeIndex(Idx: IndexElem, IndexBitWidth, Flags),
              Scale, Flags, AccumulatedOffset&: OffsetElem);
      } else {
        // Scalar index: canonicalize once, then apply to one or all lanes.
        AnyValue CanonicalIndex =
            canonicalizeIndex(Idx: Index, IndexBitWidth, Flags);
        if (Res.isAggregate()) {
          for (auto &&[ResElem, OffsetElem] :
               zip(t&: Res.asAggregate(), u&: AccumulatedOffset.asAggregate()))
            ResElem = computeScaledPtrAdd(Ptr: ResElem, Index: CanonicalIndex, Scale, Flags,
                                          AccumulatedOffset&: OffsetElem);
        } else {
          Res = computeScaledPtrAdd(Ptr: Res, Index: CanonicalIndex, Scale, Flags,
                                    AccumulatedOffset);
        }
      }
    };

    for (gep_type_iterator GTI = gep_type_begin(GEP), GTE = gep_type_end(GEP);
         GTI != GTE; ++GTI) {
      Value *V = GTI.getOperand();

      // Fast path for zero offsets.
      if (auto *CI = dyn_cast<ConstantInt>(Val: V)) {
        if (CI->isZero())
          continue;
      }
      if (isa<ConstantAggregateZero>(Val: V))
        continue;

      // Handle a struct index, which adds its field offset to the pointer.
      if (StructType *STy = GTI.getStructTypeOrNull()) {
        unsigned ElementIdx = cast<ConstantInt>(Val: V)->getZExtValue();
        const StructLayout *SL = DL.getStructLayout(Ty: STy);
        // Element offset is in bytes.
        ApplyScaledOffset(
            APInt(IndexBitWidth, SL->getElementOffset(Idx: ElementIdx)),
            APInt(IndexBitWidth, 1));
        continue;
      }

      // Sequential (array/vector/pointer) index: offset = index * stride.
      // Truncate if type size exceeds index space.
      // TODO: Should be documented in LangRef: GEPs with nowrap flags should
      // return poison when the type size exceeds index space.
      TypeSize Offset = GTI.getSequentialElementStride(DL);
      APInt Scale(IndexBitWidth, Ctx.getEffectiveTypeSize(Size: Offset),
                  /*isSigned=*/false, /*implicitTrunc=*/true);
      // A zero stride contributes nothing regardless of the index value.
      if (!Scale.isZero())
        ApplyScaledOffset(getValue(V), Scale);
    }

    setResult(I&: GEP, V: std::move(Res));
  }
1012
1013 void visitIntToPtr(IntToPtrInst &I) {
1014 return visitUnOp(I, ScalarFn: [&](const AnyValue &V) -> AnyValue {
1015 if (V.isPoison())
1016 return AnyValue::poison();
1017 // TODO: expose provenance
1018 // TODO: check metadata
1019 return Pointer(V.asInteger().zextOrTrunc(
1020 width: DL.getPointerSizeInBits(AS: I.getType()->getPointerAddressSpace())));
1021 });
1022 }
1023
1024 void visitLoadInst(LoadInst &LI) {
1025 auto RetVal =
1026 load(Ptr: getValue(V: LI.getPointerOperand()), Alignment: LI.getAlign(), ValTy: LI.getType());
1027 // TODO: track volatile loads
1028 // TODO: handle metadata
1029 setResult(I&: LI, V: std::move(RetVal));
1030 }
1031
1032 void visitStoreInst(StoreInst &SI) {
1033 auto &Ptr = getValue(V: SI.getPointerOperand());
1034 auto &Val = getValue(V: SI.getValueOperand());
1035 // TODO: track volatile stores
1036 // TODO: handle metadata
1037 store(Ptr, Alignment: SI.getAlign(), Val, ValTy: SI.getValueOperand()->getType());
1038 if (Status)
1039 Status &= Handler.onInstructionExecuted(I&: SI, Result: AnyValue());
1040 }
1041
1042 void visitInstruction(Instruction &I) {
1043 Handler.onUnrecognizedInstruction(I);
1044 Status = false;
1045 }
1046
1047 void visitExtractValueInst(ExtractValueInst &EVI) {
1048 auto &Res = getValue(V: EVI.getAggregateOperand());
1049 const AnyValue *Pos = &Res;
1050 for (unsigned Idx : EVI.indices())
1051 Pos = &Pos->asAggregate()[Idx];
1052 setResult(I&: EVI, V: *Pos);
1053 }
1054
1055 void visitInsertValueInst(InsertValueInst &IVI) {
1056 AnyValue Res = getValue(V: IVI.getAggregateOperand());
1057 AnyValue *Pos = &Res;
1058 for (unsigned Idx : IVI.indices())
1059 Pos = &Pos->asAggregate()[Idx];
1060 *Pos = getValue(V: IVI.getInsertedValueOperand());
1061 setResult(I&: IVI, V: std::move(Res));
1062 }
1063
1064 void visitInsertElementInst(InsertElementInst &IEI) {
1065 auto Res = getValue(V: IEI.getOperand(i_nocapture: 0));
1066 auto &ResVec = Res.asAggregate();
1067 auto &Idx = getValue(V: IEI.getOperand(i_nocapture: 2));
1068 if (Idx.isPoison() || Idx.asInteger().uge(RHS: ResVec.size())) {
1069 setResult(I&: IEI, V: AnyValue::getPoisonValue(Ctx, Ty: IEI.getType()));
1070 return;
1071 }
1072 ResVec[Idx.asInteger().getZExtValue()] = getValue(V: IEI.getOperand(i_nocapture: 1));
1073 setResult(I&: IEI, V: std::move(Res));
1074 }
1075
1076 void visitExtractElementInst(ExtractElementInst &EEI) {
1077 auto &SrcVec = getValue(V: EEI.getOperand(i_nocapture: 0)).asAggregate();
1078 auto &Idx = getValue(V: EEI.getOperand(i_nocapture: 1));
1079 if (Idx.isPoison() || Idx.asInteger().uge(RHS: SrcVec.size())) {
1080 setResult(I&: EEI, V: AnyValue::getPoisonValue(Ctx, Ty: EEI.getType()));
1081 return;
1082 }
1083 setResult(I&: EEI, V: SrcVec[Idx.asInteger().getZExtValue()]);
1084 }
1085
1086 void visitShuffleVectorInst(ShuffleVectorInst &SVI) {
1087 auto &LHSVec = getValue(V: SVI.getOperand(i_nocapture: 0)).asAggregate();
1088 auto &RHSVec = getValue(V: SVI.getOperand(i_nocapture: 1)).asAggregate();
1089 uint32_t Size = cast<VectorType>(Val: SVI.getOperand(i_nocapture: 0)->getType())
1090 ->getElementCount()
1091 .getKnownMinValue();
1092 std::vector<AnyValue> Res;
1093 uint32_t DstLen = Ctx.getEVL(EC: SVI.getType()->getElementCount());
1094 Res.reserve(n: DstLen);
1095 uint32_t Stride = SVI.getShuffleMask().size();
1096 // For scalable vectors, we need to repeat the shuffle mask until we fill
1097 // the destination vector.
1098 for (uint32_t Off = 0; Off != DstLen; Off += Stride) {
1099 for (int Idx : SVI.getShuffleMask()) {
1100 if (Idx == PoisonMaskElem)
1101 Res.push_back(x: AnyValue::poison());
1102 else if (Idx < static_cast<int>(Size))
1103 Res.push_back(x: LHSVec[Idx]);
1104 else
1105 Res.push_back(x: RHSVec[Idx - Size]);
1106 }
1107 }
1108 setResult(I&: SVI, V: std::move(Res));
1109 }
1110
1111 void visitBitCastInst(BitCastInst &BCI) {
1112 // The conversion is done as if the value had been stored to memory and read
1113 // back as the target type.
1114 SmallVector<Byte> Bytes;
1115 Bytes.resize(N: Ctx.getEffectiveTypeStoreSize(Ty: BCI.getType()),
1116 NV: Byte::concrete(Val: 0));
1117 Ctx.toBytes(Val: getValue(V: BCI.getOperand(i_nocapture: 0)), Ty: BCI.getOperand(i_nocapture: 0)->getType(),
1118 Bytes);
1119 setResult(I&: BCI, V: Ctx.fromBytes(Bytes, Ty: BCI.getType()));
1120 }
1121
1122 void visitFreezeInst(FreezeInst &FI) {
1123 AnyValue Val = getValue(V: FI.getOperand(i_nocapture: 0));
1124 Ctx.freeze(Val, Ty: FI.getType());
1125 setResult(I&: FI, V: std::move(Val));
1126 }
1127
  /// This function implements the main interpreter loop.
  /// It handles function calls in a non-recursive manner to avoid stack
  /// overflows: frames live on an explicit CallStack, and the outer loop
  /// re-activates the top frame after each call/return transition.
  /// \returns true iff execution completed without any error (Status stays
  /// set throughout).
  bool runMainLoop() {
    // MaxSteps == 0 means "unlimited"; see the guard in the inner loop.
    uint32_t MaxSteps = Ctx.getMaxSteps();
    uint32_t Steps = 0;
    // Outer loop: one iteration per (re)activation of the top frame.
    while (Status && !CallStack.empty()) {
      Frame &Top = CallStack.back();
      CurrentFrame = &Top;
      if (Top.State == FrameState::Entry) {
        // First activation of this frame: announce function entry.
        Handler.onFunctionEntry(F&: Top.Func, Args: Top.Args, CallSite: Top.CallSite);
      } else {
        assert(Top.State == FrameState::Pending &&
               "Expected to return from a callee.");
        // Re-activated after a callee finished: consume its return value.
        returnFromCallee();
      }

      Top.State = FrameState::Running;
      // Interpreter loop inside a function
      while (Status) {
        assert(Top.State == FrameState::Running &&
               "Expected to be in running state.");
        // Global step budget shared across all frames.
        if (MaxSteps != 0 && Steps >= MaxSteps) {
          reportError(Msg: "Exceeded maximum number of execution steps.");
          break;
        }
        ++Steps;

        Instruction &I = *Top.PC;
        visit(I: &I);
        if (!Status)
          break;

        // A function call or return has occurred.
        // We need to exit the inner loop and switch to a different frame.
        if (Top.State != FrameState::Running)
          break;

        // Otherwise, move to the next instruction if it is not a terminator.
        // For terminators, the PC is updated in the visit* method.
        if (!I.isTerminator())
          ++Top.PC;
      }

      if (!Status)
        break;

      if (Top.State == FrameState::Exit) {
        assert((Top.Func.getReturnType()->isVoidTy() || !Top.RetVal.isNone()) &&
               "Expected return value to be set on function exit.");
        Handler.onFunctionExit(F&: Top.Func, RetVal: Top.RetVal);
        // Free stack objects allocated in this frame.
        for (auto &Obj : Top.Allocas)
          Ctx.free(Address: Obj->getAddress());
        // Popping here returns control to the caller frame (state Pending),
        // which the next outer-loop iteration resumes.
        CallStack.pop_back();
      } else {
        // The frame stays on the stack; the callee it pushed runs next.
        assert(Top.State == FrameState::Pending &&
               "Expected to enter a callee.");
      }
    }
    return Status;
  }
1190};
1191
1192bool Context::runFunction(Function &F, ArrayRef<AnyValue> Args,
1193 AnyValue &RetVal, EventHandler &Handler) {
1194 InstExecutor Executor(*this, Handler, F, Args, RetVal);
1195 return Executor.runMainLoop();
1196}
1197
1198} // namespace llvm::ubi
1199