1//===- SafeStack.cpp - Safe Stack Insertion -------------------------------===//
2//
3// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
4// See https://llvm.org/LICENSE.txt for license information.
5// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
6//
7//===----------------------------------------------------------------------===//
8//
9// This pass splits the stack into the safe stack (kept as-is for LLVM backend)
10// and the unsafe stack (explicitly allocated and managed through the runtime
11// support library).
12//
13// http://clang.llvm.org/docs/SafeStack.html
14//
15//===----------------------------------------------------------------------===//
16
17#include "llvm/CodeGen/SafeStack.h"
18#include "SafeStackLayout.h"
19#include "llvm/ADT/APInt.h"
20#include "llvm/ADT/ArrayRef.h"
21#include "llvm/ADT/SmallPtrSet.h"
22#include "llvm/ADT/SmallVector.h"
23#include "llvm/ADT/Statistic.h"
24#include "llvm/Analysis/AssumptionCache.h"
25#include "llvm/Analysis/BranchProbabilityInfo.h"
26#include "llvm/Analysis/DomTreeUpdater.h"
27#include "llvm/Analysis/InlineCost.h"
28#include "llvm/Analysis/LoopInfo.h"
29#include "llvm/Analysis/ScalarEvolution.h"
30#include "llvm/Analysis/ScalarEvolutionExpressions.h"
31#include "llvm/Analysis/StackLifetime.h"
32#include "llvm/Analysis/TargetLibraryInfo.h"
33#include "llvm/CodeGen/TargetLowering.h"
34#include "llvm/CodeGen/TargetPassConfig.h"
35#include "llvm/CodeGen/TargetSubtargetInfo.h"
36#include "llvm/IR/Argument.h"
37#include "llvm/IR/Attributes.h"
38#include "llvm/IR/ConstantRange.h"
39#include "llvm/IR/Constants.h"
40#include "llvm/IR/DIBuilder.h"
41#include "llvm/IR/DataLayout.h"
42#include "llvm/IR/DerivedTypes.h"
43#include "llvm/IR/Dominators.h"
44#include "llvm/IR/Function.h"
45#include "llvm/IR/IRBuilder.h"
46#include "llvm/IR/InstIterator.h"
47#include "llvm/IR/Instruction.h"
48#include "llvm/IR/Instructions.h"
49#include "llvm/IR/IntrinsicInst.h"
50#include "llvm/IR/Intrinsics.h"
51#include "llvm/IR/MDBuilder.h"
52#include "llvm/IR/Metadata.h"
53#include "llvm/IR/Module.h"
54#include "llvm/IR/Type.h"
55#include "llvm/IR/Use.h"
56#include "llvm/IR/Value.h"
57#include "llvm/InitializePasses.h"
58#include "llvm/Pass.h"
59#include "llvm/Support/Casting.h"
60#include "llvm/Support/Debug.h"
61#include "llvm/Support/ErrorHandling.h"
62#include "llvm/Support/raw_ostream.h"
63#include "llvm/Target/TargetMachine.h"
64#include "llvm/Transforms/Utils/BasicBlockUtils.h"
65#include "llvm/Transforms/Utils/Cloning.h"
66#include "llvm/Transforms/Utils/Local.h"
67#include <algorithm>
68#include <cassert>
69#include <cstdint>
70#include <optional>
71#include <string>
72
73using namespace llvm;
74using namespace llvm::safestack;
75
76#define DEBUG_TYPE "safe-stack"
77
// Pass-level statistics, printed with -stats.
STATISTIC(NumFunctions, "Total number of functions");
STATISTIC(NumUnsafeStackFunctions, "Number of functions with unsafe stack");
STATISTIC(NumUnsafeStackRestorePointsFunctions,
          "Number of functions that use setjmp or exceptions");

STATISTIC(NumAllocas, "Total number of allocas");
STATISTIC(NumUnsafeStaticAllocas, "Number of unsafe static allocas");
STATISTIC(NumUnsafeDynamicAllocas, "Number of unsafe dynamic allocas");
STATISTIC(NumUnsafeByValArguments, "Number of unsafe byval arguments");
STATISTIC(NumUnsafeStackRestorePoints, "Number of setjmps and landingpads");

/// Use __safestack_pointer_address even if the platform has a faster way of
/// accessing the safe stack pointer.
static cl::opt<bool>
    SafeStackUsePointerAddress("safestack-use-pointer-address",
                               cl::init(Val: false), cl::Hidden);

/// Enable reuse of stack slots for allocas with disjoint lifetimes.
static cl::opt<bool> ClColoring("safe-stack-coloring",
                                cl::desc("enable safe stack coloring"),
                                cl::Hidden, cl::init(Val: true));
98
99namespace {
100
/// The SafeStack pass splits the stack of each function into the safe
/// stack, which is only accessed through memory safe dereferences (as
/// determined statically), and the unsafe stack, which contains all
/// local variables that are accessed in ways that we can't prove to
/// be safe.
class SafeStack {
  Function &F;
  const TargetLoweringBase &TL;
  const LibcallLoweringInfo &Libcalls;
  const DataLayout &DL;
  DomTreeUpdater *DTU;
  ScalarEvolution &SE;

  // Frequently used types, cached from the DataLayout / context.
  Type *StackPtrTy; // Pointer type of the unsafe stack pointer.
  Type *IntPtrTy;   // Integer type as wide as a pointer.
  Type *Int32Ty;    // Used for frame offsets and frame-size metadata.

  // Location holding the unsafe stack pointer for the current function;
  // set up by run() (either a runtime call result or a target location).
  Value *UnsafeStackPtr = nullptr;

  /// Unsafe stack alignment. Each stack frame must ensure that the stack is
  /// aligned to this value. We need to re-align the unsafe stack if the
  /// alignment of any object on the stack exceeds this value.
  ///
  /// 16 seems like a reasonable upper bound on the alignment of objects that we
  /// might expect to appear on the stack on most common targets.
  static constexpr Align StackAlignment = Align::Constant<16>();

  /// Return the value of the stack canary.
  Value *getStackGuard(IRBuilder<> &IRB, Function &F);

  /// Load stack guard from the frame and check if it has changed.
  void checkStackGuard(IRBuilder<> &IRB, Function &F, Instruction &RI,
                       AllocaInst *StackGuardSlot, Value *StackGuard);

  /// Find all static allocas, dynamic allocas, return instructions and
  /// stack restore points (exception unwind blocks and setjmp calls) in the
  /// given function and append them to the respective vectors.
  void findInsts(Function &F, SmallVectorImpl<AllocaInst *> &StaticAllocas,
                 SmallVectorImpl<AllocaInst *> &DynamicAllocas,
                 SmallVectorImpl<Argument *> &ByValArguments,
                 SmallVectorImpl<Instruction *> &Returns,
                 SmallVectorImpl<Instruction *> &StackRestorePoints);

  /// Calculate the allocation size of a given alloca. Returns 0 if the
  /// size can not be statically determined.
  uint64_t getStaticAllocaAllocationSize(const AllocaInst* AI);

  /// Allocate space for all static allocas in \p StaticAllocas,
  /// replace allocas with pointers into the unsafe stack.
  ///
  /// \returns A pointer to the top of the unsafe stack after all unsafe static
  /// allocas are allocated.
  Value *moveStaticAllocasToUnsafeStack(IRBuilder<> &IRB, Function &F,
                                        ArrayRef<AllocaInst *> StaticAllocas,
                                        ArrayRef<Argument *> ByValArguments,
                                        Instruction *BasePointer,
                                        AllocaInst *StackGuardSlot);

  /// Generate code to restore the stack after all stack restore points
  /// in \p StackRestorePoints.
  ///
  /// \returns A local variable in which to maintain the dynamic top of the
  /// unsafe stack if needed.
  AllocaInst *
  createStackRestorePoints(IRBuilder<> &IRB, Function &F,
                           ArrayRef<Instruction *> StackRestorePoints,
                           Value *StaticTop, bool NeedDynamicTop);

  /// Replace all allocas in \p DynamicAllocas with code to allocate
  /// space dynamically on the unsafe stack and store the dynamic unsafe stack
  /// top to \p DynamicTop if non-null.
  void moveDynamicAllocasToUnsafeStack(Function &F, Value *UnsafeStackPtr,
                                       AllocaInst *DynamicTop,
                                       ArrayRef<AllocaInst *> DynamicAllocas);

  /// Decide whether every use of \p AllocaPtr is provably in-bounds, i.e.
  /// whether the allocation may stay on the (register-spill) safe stack.
  bool IsSafeStackAlloca(const Value *AllocaPtr, uint64_t AllocaSize);

  /// Check one mem intrinsic use \p U of \p AllocaPtr for in-bounds access.
  bool IsMemIntrinsicSafe(const MemIntrinsic *MI, const Use &U,
                          const Value *AllocaPtr, uint64_t AllocaSize);
  /// Check a single load/store-style access of \p Size bytes at \p Addr.
  bool IsAccessSafe(Value *Addr, uint64_t Size, const Value *AllocaPtr,
                    uint64_t AllocaSize);

  /// Heuristics controlling inlining of the __safestack_pointer_address call.
  bool ShouldInlinePointerAddress(CallInst &CI);
  void TryInlinePointerAddress();

public:
  SafeStack(Function &F, const TargetLoweringBase &TL,
            const LibcallLoweringInfo &Libcalls, const DataLayout &DL,
            DomTreeUpdater *DTU, ScalarEvolution &SE)
      : F(F), TL(TL), Libcalls(Libcalls), DL(DL), DTU(DTU), SE(SE),
        StackPtrTy(DL.getAllocaPtrType(Ctx&: F.getContext())),
        IntPtrTy(DL.getIntPtrType(C&: F.getContext())),
        Int32Ty(Type::getInt32Ty(C&: F.getContext())) {}

  // Run the transformation on the associated function.
  // Returns whether the function was changed.
  bool run();
};
199
200uint64_t SafeStack::getStaticAllocaAllocationSize(const AllocaInst* AI) {
201 if (auto Size = AI->getAllocationSize(DL))
202 if (Size->isFixed())
203 return Size->getFixedValue();
204 return 0;
205}
206
/// Decide whether an access of \p AccessSize bytes at \p Addr provably stays
/// within the \p AllocaSize bytes of the object \p AllocaPtr, using SCEV to
/// bound the access offset.
bool SafeStack::IsAccessSafe(Value *Addr, uint64_t AccessSize,
                             const Value *AllocaPtr, uint64_t AllocaSize) {
  // The address must be directly based on the alloca (same SCEV pointer
  // base); anything else is conservatively treated as unsafe.
  const SCEV *AddrExpr = SE.getSCEV(V: Addr);
  const auto *Base = dyn_cast<SCEVUnknown>(Val: SE.getPointerBase(V: AddrExpr));
  if (!Base || Base->getValue() != AllocaPtr) {
    LLVM_DEBUG(
        dbgs() << "[SafeStack] "
               << (isa<AllocaInst>(AllocaPtr) ? "Alloca " : "ByValArgument ")
               << *AllocaPtr << "\n"
               << "SCEV " << *AddrExpr << " not directly based on alloca\n");
    return false;
  }

  // Safe iff [offset, offset + AccessSize) is contained in [0, AllocaSize)
  // for every possible (unsigned) value of the offset expression.
  const SCEV *Expr = SE.removePointerBase(S: AddrExpr);
  uint64_t BitWidth = SE.getTypeSizeInBits(Ty: Expr->getType());
  ConstantRange AccessStartRange = SE.getUnsignedRange(S: Expr);
  ConstantRange SizeRange =
      ConstantRange(APInt(BitWidth, 0), APInt(BitWidth, AccessSize));
  ConstantRange AccessRange = AccessStartRange.add(Other: SizeRange);
  ConstantRange AllocaRange =
      ConstantRange(APInt(BitWidth, 0), APInt(BitWidth, AllocaSize));
  bool Safe = AllocaRange.contains(CR: AccessRange);

  LLVM_DEBUG(
      dbgs() << "[SafeStack] "
             << (isa<AllocaInst>(AllocaPtr) ? "Alloca " : "ByValArgument ")
             << *AllocaPtr << "\n"
             << " Access " << *Addr << "\n"
             << " SCEV " << *Expr
             << " U: " << SE.getUnsignedRange(Expr)
             << ", S: " << SE.getSignedRange(Expr) << "\n"
             << " Range " << AccessRange << "\n"
             << " AllocaRange " << AllocaRange << "\n"
             << " " << (Safe ? "safe" : "unsafe") << "\n");

  return Safe;
}
244
245bool SafeStack::IsMemIntrinsicSafe(const MemIntrinsic *MI, const Use &U,
246 const Value *AllocaPtr,
247 uint64_t AllocaSize) {
248 if (auto MTI = dyn_cast<MemTransferInst>(Val: MI)) {
249 if (MTI->getRawSource() != U && MTI->getRawDest() != U)
250 return true;
251 } else {
252 if (MI->getRawDest() != U)
253 return true;
254 }
255
256 auto Len = MI->getLengthInBytes();
257 // Non-constant size => unsafe. FIXME: try SCEV getRange.
258 if (!Len) return false;
259 return IsAccessSafe(Addr: U, AccessSize: Len->getZExtValue(), AllocaPtr, AllocaSize);
260}
261
/// Check whether a given allocation must be put on the safe
/// stack or not. The function analyzes all uses of AI and checks whether it is
/// only accessed in a memory safe way (as decided statically).
bool SafeStack::IsSafeStackAlloca(const Value *AllocaPtr, uint64_t AllocaSize) {
  // Go through all uses of this alloca and check whether all accesses to the
  // allocated object are statically known to be memory safe and, hence, the
  // object can be placed on the safe stack.
  SmallPtrSet<const Value *, 16> Visited;
  SmallVector<const Value *, 8> WorkList;
  WorkList.push_back(Elt: AllocaPtr);

  // A DFS search through all uses of the alloca in bitcasts/PHI/GEPs/etc.
  while (!WorkList.empty()) {
    const Value *V = WorkList.pop_back_val();
    for (const Use &UI : V->uses()) {
      auto I = cast<const Instruction>(Val: UI.getUser());
      assert(V == UI.get());

      switch (I->getOpcode()) {
      case Instruction::Load:
        // Loading from the pointer: safe iff the loaded bytes stay in bounds.
        if (!IsAccessSafe(Addr: UI, AccessSize: DL.getTypeStoreSize(Ty: I->getType()), AllocaPtr,
                          AllocaSize))
          return false;
        break;

      case Instruction::VAArg:
        // "va-arg" from a pointer is safe.
        break;
      case Instruction::Store:
        if (V == I->getOperand(i: 0)) {
          // Stored the pointer - conservatively assume it may be unsafe.
          LLVM_DEBUG(dbgs()
                     << "[SafeStack] Unsafe alloca: " << *AllocaPtr
                     << "\n store of address: " << *I << "\n");
          return false;
        }

        // Storing through the pointer: safe iff the stored bytes stay in
        // bounds.
        if (!IsAccessSafe(Addr: UI, AccessSize: DL.getTypeStoreSize(Ty: I->getOperand(i: 0)->getType()),
                          AllocaPtr, AllocaSize))
          return false;
        break;

      case Instruction::Ret:
        // Information leak.
        return false;

      case Instruction::Call:
      case Instruction::Invoke: {
        const CallBase &CS = *cast<CallBase>(Val: I);

        // Lifetime markers don't constitute a real access.
        if (I->isLifetimeStartOrEnd())
          continue;

        // Mem intrinsics are handled precisely via their constant length.
        if (const MemIntrinsic *MI = dyn_cast<MemIntrinsic>(Val: I)) {
          if (!IsMemIntrinsicSafe(MI, U: UI, AllocaPtr, AllocaSize)) {
            LLVM_DEBUG(dbgs()
                       << "[SafeStack] Unsafe alloca: " << *AllocaPtr
                       << "\n unsafe memintrinsic: " << *I << "\n");
            return false;
          }
          continue;
        }

        // LLVM 'nocapture' attribute is only set for arguments whose address
        // is not stored, passed around, or used in any other non-trivial way.
        // We assume that passing a pointer to an object as a 'nocapture
        // readnone' argument is safe.
        // FIXME: a more precise solution would require an interprocedural
        // analysis here, which would look at all uses of an argument inside
        // the function being called.
        auto B = CS.arg_begin(), E = CS.arg_end();
        for (const auto *A = B; A != E; ++A)
          if (A->get() == V)
            if (!(CS.doesNotCapture(OpNo: A - B) && (CS.doesNotAccessMemory(OpNo: A - B) ||
                                                CS.doesNotAccessMemory()))) {
              LLVM_DEBUG(dbgs() << "[SafeStack] Unsafe alloca: " << *AllocaPtr
                                << "\n unsafe call: " << *I << "\n");
              return false;
            }
        continue;
      }

      default:
        // GEP/bitcast/PHI/select etc.: derived pointer; keep walking its
        // uses (Visited guards against cycles through PHIs).
        if (Visited.insert(Ptr: I).second)
          WorkList.push_back(Elt: cast<const Instruction>(Val: I));
      }
    }
  }

  // All uses of the alloca are safe, we can place it on the safe stack.
  return true;
}
354
355Value *SafeStack::getStackGuard(IRBuilder<> &IRB, Function &F) {
356 Value *StackGuardVar = TL.getIRStackGuard(IRB, Libcalls);
357 Module *M = F.getParent();
358
359 if (!StackGuardVar) {
360 TL.insertSSPDeclarations(M&: *M, Libcalls);
361 return IRB.CreateIntrinsic(ID: Intrinsic::stackguard, Args: {});
362 }
363
364 return IRB.CreateLoad(Ty: StackPtrTy, Ptr: StackGuardVar, Name: "StackGuard");
365}
366
/// Single scan of \p F classifying everything the transformation needs:
/// unsafe allocas (split static vs. dynamic), unsafe byval arguments, return
/// points, and the instructions after which the unsafe stack pointer must be
/// restored (setjmp-like calls and landing pads).
void SafeStack::findInsts(Function &F,
                          SmallVectorImpl<AllocaInst *> &StaticAllocas,
                          SmallVectorImpl<AllocaInst *> &DynamicAllocas,
                          SmallVectorImpl<Argument *> &ByValArguments,
                          SmallVectorImpl<Instruction *> &Returns,
                          SmallVectorImpl<Instruction *> &StackRestorePoints) {
  for (Instruction &I : instructions(F: &F)) {
    if (auto AI = dyn_cast<AllocaInst>(Val: &I)) {
      ++NumAllocas;

      uint64_t Size = getStaticAllocaAllocationSize(AI);
      // Allocas whose every use is provably in-bounds stay on the safe stack.
      if (IsSafeStackAlloca(AllocaPtr: AI, AllocaSize: Size))
        continue;

      if (AI->isStaticAlloca()) {
        ++NumUnsafeStaticAllocas;
        StaticAllocas.push_back(Elt: AI);
      } else {
        ++NumUnsafeDynamicAllocas;
        DynamicAllocas.push_back(Elt: AI);
      }
    } else if (auto RI = dyn_cast<ReturnInst>(Val: &I)) {
      // For musttail-call epilogues the restore must go before the call
      // rather than before the return instruction itself.
      if (CallInst *CI = I.getParent()->getTerminatingMustTailCall())
        Returns.push_back(Elt: CI);
      else
        Returns.push_back(Elt: RI);
    } else if (auto CI = dyn_cast<CallInst>(Val: &I)) {
      // setjmps require stack restore.
      if (CI->getCalledFunction() && CI->canReturnTwice())
        StackRestorePoints.push_back(Elt: CI);
    } else if (auto LP = dyn_cast<LandingPadInst>(Val: &I)) {
      // Exception landing pads require stack restore.
      StackRestorePoints.push_back(Elt: LP);
    } else if (auto II = dyn_cast<IntrinsicInst>(Val: &I)) {
      if (II->getIntrinsicID() == Intrinsic::gcroot)
        report_fatal_error(
            reason: "gcroot intrinsic not compatible with safestack attribute");
    }
  }
  // Byval arguments are stack copies made by the caller; if accessed in a
  // way we can't prove safe they must be re-copied onto the unsafe stack.
  for (Argument &Arg : F.args()) {
    if (!Arg.hasByValAttr())
      continue;
    uint64_t Size = DL.getTypeStoreSize(Ty: Arg.getParamByValType());
    if (IsSafeStackAlloca(AllocaPtr: &Arg, AllocaSize: Size))
      continue;

    ++NumUnsafeByValArguments;
    ByValArguments.push_back(Elt: &Arg);
  }
}
417
/// After each restore point (setjmp return / landing pad), store the correct
/// unsafe stack top back into the unsafe stack pointer location; the runtime
/// does not know about the unsafe stack and cannot unwind it itself.
AllocaInst *
SafeStack::createStackRestorePoints(IRBuilder<> &IRB, Function &F,
                                    ArrayRef<Instruction *> StackRestorePoints,
                                    Value *StaticTop, bool NeedDynamicTop) {
  assert(StaticTop && "The stack top isn't set.");

  if (StackRestorePoints.empty())
    return nullptr;

  // We need the current value of the shadow stack pointer to restore
  // after longjmp or exception catching.

  // FIXME: On some platforms this could be handled by the longjmp/exception
  // runtime itself.

  AllocaInst *DynamicTop = nullptr;
  if (NeedDynamicTop) {
    // If we also have dynamic alloca's, the stack pointer value changes
    // throughout the function. For now we store it in an alloca.
    DynamicTop = IRB.CreateAlloca(Ty: StackPtrTy, /*ArraySize=*/nullptr,
                                  Name: "unsafe_stack_dynamic_ptr");
    IRB.CreateStore(Val: StaticTop, Ptr: DynamicTop);
  }

  // Restore current stack pointer after longjmp/exception catch.
  for (Instruction *I : StackRestorePoints) {
    ++NumUnsafeStackRestorePoints;

    // Insert immediately after the restore point instruction.
    IRB.SetInsertPoint(I->getNextNode());
    Value *CurrentTop =
        DynamicTop ? IRB.CreateLoad(Ty: StackPtrTy, Ptr: DynamicTop) : StaticTop;
    IRB.CreateStore(Val: CurrentTop, Ptr: UnsafeStackPtr);
  }

  return DynamicTop;
}
454
/// Before the return \p RI, compare the canary saved in \p StackGuardSlot
/// against the original \p StackGuard and branch to an unreachable failure
/// block that calls the stack-protector failure libcall on mismatch.
void SafeStack::checkStackGuard(IRBuilder<> &IRB, Function &F, Instruction &RI,
                                AllocaInst *StackGuardSlot, Value *StackGuard) {
  Value *V = IRB.CreateLoad(Ty: StackPtrTy, Ptr: StackGuardSlot);
  Value *Cmp = IRB.CreateICmpNE(LHS: StackGuard, RHS: V);

  // Bias the branch heavily towards the no-corruption path.
  auto SuccessProb = BranchProbabilityInfo::getBranchProbStackProtector(IsLikely: true);
  auto FailureProb = BranchProbabilityInfo::getBranchProbStackProtector(IsLikely: false);
  MDNode *Weights = MDBuilder(F.getContext())
                        .createBranchWeights(TrueWeight: SuccessProb.getNumerator(),
                                             FalseWeight: FailureProb.getNumerator());
  Instruction *CheckTerm =
      SplitBlockAndInsertIfThen(Cond: Cmp, SplitBefore: &RI, /* Unreachable */ true, BranchWeights: Weights, DTU);
  IRBuilder<> IRBFail(CheckTerm);
  // FIXME: respect -fsanitize-trap / -ftrap-function here?
  RTLIB::LibcallImpl StackChkFailImpl =
      Libcalls.getLibcallImpl(Call: RTLIB::STACKPROTECTOR_CHECK_FAIL);
  if (StackChkFailImpl == RTLIB::Unsupported) {
    F.getContext().emitError(
        ErrorStr: "no libcall available for stackprotector check fail");
    return;
  }

  StringRef StackChkFailName =
      RTLIB::RuntimeLibcallsInfo::getLibcallImplName(CallImpl: StackChkFailImpl);

  FunctionCallee StackChkFail =
      F.getParent()->getOrInsertFunction(Name: StackChkFailName, RetTy: IRB.getVoidTy());
  IRBFail.CreateCall(Callee: StackChkFail, Args: {});
}
484
/// We explicitly compute and set the unsafe stack layout for all unsafe
/// static alloca instructions. We save the unsafe "base pointer" in the
/// prologue into a local variable and restore it in the epilogue.
Value *SafeStack::moveStaticAllocasToUnsafeStack(
    IRBuilder<> &IRB, Function &F, ArrayRef<AllocaInst *> StaticAllocas,
    ArrayRef<Argument *> ByValArguments, Instruction *BasePointer,
    AllocaInst *StackGuardSlot) {
  if (StaticAllocas.empty() && ByValArguments.empty())
    return BasePointer;

  DIBuilder DIB(*F.getParent());

  // Compute lifetimes so that allocas with disjoint live ranges may share a
  // frame slot (stack coloring).
  StackLifetime SSC(F, StaticAllocas, StackLifetime::LivenessType::May);
  static const StackLifetime::LiveRange NoColoringRange(1, true);
  if (ClColoring)
    SSC.run();

  // The lifetime markers are meaningless once the allocas become plain
  // pointer arithmetic off the base pointer; erase them.
  for (const auto *I : SSC.getMarkers()) {
    auto *Op = dyn_cast<Instruction>(Val: I->getOperand(i_nocapture: 1));
    const_cast<IntrinsicInst *>(I)->eraseFromParent();
    // Remove the operand bitcast, too, if it has no more uses left.
    if (Op && Op->use_empty())
      Op->eraseFromParent();
  }

  // Unsafe stack always grows down.
  StackLayout SSL(StackAlignment);
  if (StackGuardSlot) {
    // The guard slot must live for the entire frame.
    SSL.addObject(V: StackGuardSlot, Size: getStaticAllocaAllocationSize(AI: StackGuardSlot),
                  Alignment: StackGuardSlot->getAlign(), Range: SSC.getFullLiveRange());
  }

  for (Argument *Arg : ByValArguments) {
    Type *Ty = Arg->getParamByValType();
    uint64_t Size = DL.getTypeStoreSize(Ty);
    if (Size == 0)
      Size = 1; // Don't create zero-sized stack objects.

    // Ensure the object is properly aligned.
    Align Align = DL.getPrefTypeAlign(Ty);
    if (auto A = Arg->getParamAlign())
      Align = std::max(a: Align, b: *A);
    SSL.addObject(V: Arg, Size, Alignment: Align, Range: SSC.getFullLiveRange());
  }

  for (AllocaInst *AI : StaticAllocas) {
    uint64_t Size = getStaticAllocaAllocationSize(AI);
    if (Size == 0)
      Size = 1; // Don't create zero-sized stack objects.

    SSL.addObject(V: AI, Size, Alignment: AI->getAlign(),
                  Range: ClColoring ? SSC.getLiveRange(AI) : NoColoringRange);
  }

  SSL.computeLayout();
  Align FrameAlignment = SSL.getFrameAlignment();

  // FIXME: tell SSL that we start at a less-then-MaxAlignment aligned location
  // (AlignmentSkew).
  if (FrameAlignment > StackAlignment) {
    // Re-align the base pointer according to the max requested alignment.
    IRB.SetInsertPoint(BasePointer->getNextNode());
    BasePointer = cast<Instruction>(Val: IRB.CreateIntToPtr(
        V: IRB.CreateAnd(
            LHS: IRB.CreatePtrToInt(V: BasePointer, DestTy: IntPtrTy),
            RHS: ConstantInt::get(Ty: IntPtrTy, V: ~(FrameAlignment.value() - 1))),
        DestTy: StackPtrTy));
  }

  IRB.SetInsertPoint(BasePointer->getNextNode());

  if (StackGuardSlot) {
    // Rewrite the guard slot as a negative offset off the base pointer
    // (the unsafe stack grows down).
    unsigned Offset = SSL.getObjectOffset(V: StackGuardSlot);
    Value *Off =
        IRB.CreatePtrAdd(Ptr: BasePointer, Offset: ConstantInt::get(Ty: Int32Ty, V: -Offset));
    Value *NewAI =
        IRB.CreateBitCast(V: Off, DestTy: StackGuardSlot->getType(), Name: "StackGuardSlot");

    // Replace alloc with the new location.
    StackGuardSlot->replaceAllUsesWith(V: NewAI);
    StackGuardSlot->eraseFromParent();
  }

  for (Argument *Arg : ByValArguments) {
    unsigned Offset = SSL.getObjectOffset(V: Arg);
    MaybeAlign Align(SSL.getObjectAlignment(V: Arg));
    Type *Ty = Arg->getParamByValType();

    uint64_t Size = DL.getTypeStoreSize(Ty);
    if (Size == 0)
      Size = 1; // Don't create zero-sized stack objects.

    Value *Off =
        IRB.CreatePtrAdd(Ptr: BasePointer, Offset: ConstantInt::get(Ty: Int32Ty, V: -Offset));
    Value *NewArg = IRB.CreateBitCast(V: Off, DestTy: Arg->getType(),
                                      Name: Arg->getName() + ".unsafe-byval");

    // Replace alloc with the new location.
    replaceDbgDeclare(Address: Arg, NewAddress: BasePointer, Builder&: DIB, DIExprFlags: DIExpression::ApplyOffset,
                      Offset: -Offset);
    Arg->replaceAllUsesWith(V: NewArg);
    IRB.SetInsertPoint(cast<Instruction>(Val: NewArg)->getNextNode());
    // Copy the caller-supplied byval contents into the unsafe-stack slot.
    IRB.CreateMemCpy(Dst: Off, DstAlign: Align, Src: Arg, SrcAlign: Arg->getParamAlign(), Size);
  }

  // Allocate space for every unsafe static AllocaInst on the unsafe stack.
  for (AllocaInst *AI : StaticAllocas) {
    IRB.SetInsertPoint(AI);
    unsigned Offset = SSL.getObjectOffset(V: AI);

    replaceDbgDeclare(Address: AI, NewAddress: BasePointer, Builder&: DIB, DIExprFlags: DIExpression::ApplyOffset, Offset: -Offset);
    replaceDbgValueForAlloca(AI, NewAllocaAddress: BasePointer, Builder&: DIB, Offset: -Offset);

    // Replace uses of the alloca with the new location.
    // Insert address calculation close to each use to work around PR27844.
    std::string Name = std::string(AI->getName()) + ".unsafe";
    while (!AI->use_empty()) {
      Use &U = *AI->use_begin();
      Instruction *User = cast<Instruction>(Val: U.getUser());

      // Drop lifetime markers now that this is no longer an alloca.
      // SafeStack has already performed its own stack coloring.
      if (User->isLifetimeStartOrEnd()) {
        User->eraseFromParent();
        continue;
      }

      // For a PHI use, the replacement must be computed in the predecessor
      // block, not next to the PHI itself.
      Instruction *InsertBefore;
      if (auto *PHI = dyn_cast<PHINode>(Val: User))
        InsertBefore = PHI->getIncomingBlock(U)->getTerminator();
      else
        InsertBefore = User;

      IRBuilder<> IRBUser(InsertBefore);
      Value *Off =
          IRBUser.CreatePtrAdd(Ptr: BasePointer, Offset: ConstantInt::get(Ty: Int32Ty, V: -Offset));
      Value *Replacement =
          IRBUser.CreateAddrSpaceCast(V: Off, DestTy: AI->getType(), Name);

      if (auto *PHI = dyn_cast<PHINode>(Val: User))
        // PHI nodes may have multiple incoming edges from the same BB (why??),
        // all must be updated at once with the same incoming value.
        PHI->setIncomingValueForBlock(BB: PHI->getIncomingBlock(U), V: Replacement);
      else
        U.set(Replacement);
    }

    AI->eraseFromParent();
  }

  // Re-align BasePointer so that our callees would see it aligned as
  // expected.
  // FIXME: no need to update BasePointer in leaf functions.
  unsigned FrameSize = alignTo(Size: SSL.getFrameSize(), A: StackAlignment);

  // Record the unsafe frame size as "unsafe-stack-size" annotation metadata
  // on the function.
  MDBuilder MDB(F.getContext());
  SmallVector<Metadata *, 2> Data;
  Data.push_back(Elt: MDB.createString(Str: "unsafe-stack-size"));
  Data.push_back(Elt: MDB.createConstant(C: ConstantInt::get(Ty: Int32Ty, V: FrameSize)));
  MDNode *MD = MDTuple::get(Context&: F.getContext(), MDs: Data);
  F.setMetadata(KindID: LLVMContext::MD_annotation, Node: MD);

  // Update shadow stack pointer in the function epilogue.
  IRB.SetInsertPoint(BasePointer->getNextNode());

  Value *StaticTop =
      IRB.CreatePtrAdd(Ptr: BasePointer, Offset: ConstantInt::get(Ty: Int32Ty, V: -FrameSize),
                       Name: "unsafe_stack_static_top");
  IRB.CreateStore(Val: StaticTop, Ptr: UnsafeStackPtr);
  return StaticTop;
}
656
/// Rewrite each dynamic (non-static) unsafe alloca as an explicit bump of the
/// unsafe stack pointer, and retarget stacksave/stackrestore intrinsics to
/// operate on the unsafe stack pointer location.
void SafeStack::moveDynamicAllocasToUnsafeStack(
    Function &F, Value *UnsafeStackPtr, AllocaInst *DynamicTop,
    ArrayRef<AllocaInst *> DynamicAllocas) {
  DIBuilder DIB(*F.getParent());

  for (AllocaInst *AI : DynamicAllocas) {
    IRBuilder<> IRB(AI);

    // Compute the new SP value (after AI).
    Value *Size = IRB.CreateAllocationSize(DestTy: IntPtrTy, AI);
    Value *SP = IRB.CreatePtrToInt(V: IRB.CreateLoad(Ty: StackPtrTy, Ptr: UnsafeStackPtr),
                                   DestTy: IntPtrTy);
    // The unsafe stack grows down, so subtract the allocation size.
    SP = IRB.CreateSub(LHS: SP, RHS: Size);

    // Align the SP value to satisfy the AllocaInst and stack alignments.
    auto Align = std::max(a: AI->getAlign(), b: StackAlignment);

    Value *NewTop = IRB.CreateIntToPtr(
        V: IRB.CreateAnd(
            LHS: SP, RHS: ConstantInt::getSigned(Ty: IntPtrTy, V: ~uint64_t(Align.value() - 1))),
        DestTy: StackPtrTy);

    // Save the stack pointer.
    IRB.CreateStore(Val: NewTop, Ptr: UnsafeStackPtr);
    if (DynamicTop)
      IRB.CreateStore(Val: NewTop, Ptr: DynamicTop);

    Value *NewAI = IRB.CreatePointerCast(V: NewTop, DestTy: AI->getType());
    if (AI->hasName() && isa<Instruction>(Val: NewAI))
      NewAI->takeName(V: AI);

    replaceDbgDeclare(Address: AI, NewAddress: NewAI, Builder&: DIB, DIExprFlags: DIExpression::ApplyOffset, Offset: 0);
    AI->replaceAllUsesWith(V: NewAI);
    AI->eraseFromParent();
  }

  if (!DynamicAllocas.empty()) {
    // Now go through the instructions again, replacing stacksave/stackrestore.
    for (Instruction &I : llvm::make_early_inc_range(Range: instructions(F: &F))) {
      auto *II = dyn_cast<IntrinsicInst>(Val: &I);
      if (!II)
        continue;

      if (II->getIntrinsicID() == Intrinsic::stacksave) {
        // stacksave becomes a load of the unsafe stack pointer.
        IRBuilder<> IRB(II);
        Instruction *LI = IRB.CreateLoad(Ty: StackPtrTy, Ptr: UnsafeStackPtr);
        LI->takeName(V: II);
        II->replaceAllUsesWith(V: LI);
        II->eraseFromParent();
      } else if (II->getIntrinsicID() == Intrinsic::stackrestore) {
        // stackrestore becomes a store of its operand back to the pointer.
        IRBuilder<> IRB(II);
        Instruction *SI = IRB.CreateStore(Val: II->getArgOperand(i: 0), Ptr: UnsafeStackPtr);
        SI->takeName(V: II);
        assert(II->use_empty());
        II->eraseFromParent();
      }
    }
  }
}
716
717bool SafeStack::ShouldInlinePointerAddress(CallInst &CI) {
718 Function *Callee = CI.getCalledFunction();
719 if (CI.hasFnAttr(Kind: Attribute::AlwaysInline) &&
720 isInlineViable(Callee&: *Callee).isSuccess())
721 return true;
722 if (Callee->isInterposable() || Callee->hasFnAttribute(Kind: Attribute::NoInline) ||
723 CI.isNoInline())
724 return false;
725 return true;
726}
727
728void SafeStack::TryInlinePointerAddress() {
729 auto *CI = dyn_cast<CallInst>(Val: UnsafeStackPtr);
730 if (!CI)
731 return;
732
733 if(F.hasOptNone())
734 return;
735
736 Function *Callee = CI->getCalledFunction();
737 if (!Callee || Callee->isDeclaration())
738 return;
739
740 if (!ShouldInlinePointerAddress(CI&: *CI))
741 return;
742
743 InlineFunctionInfo IFI;
744 InlineFunction(CB&: *CI, IFI);
745}
746
/// Driver: analyze the function, then split its stack. Returns true iff the
/// function was modified.
bool SafeStack::run() {
  assert(F.hasFnAttribute(Attribute::SafeStack) &&
         "Can't run SafeStack on a function without the attribute");
  assert(!F.isDeclaration() && "Can't run SafeStack on a function declaration");

  ++NumFunctions;

  SmallVector<AllocaInst *, 16> StaticAllocas;
  SmallVector<AllocaInst *, 4> DynamicAllocas;
  SmallVector<Argument *, 4> ByValArguments;
  SmallVector<Instruction *, 4> Returns;

  // Collect all points where stack gets unwound and needs to be restored
  // This is only necessary because the runtime (setjmp and unwind code) is
  // not aware of the unsafe stack and won't unwind/restore it properly.
  // To work around this problem without changing the runtime, we insert
  // instrumentation to restore the unsafe stack pointer when necessary.
  SmallVector<Instruction *, 4> StackRestorePoints;

  // Find all static and dynamic alloca instructions that must be moved to the
  // unsafe stack, all return instructions and stack restore points.
  findInsts(F, StaticAllocas, DynamicAllocas, ByValArguments, Returns,
            StackRestorePoints);

  if (StaticAllocas.empty() && DynamicAllocas.empty() &&
      ByValArguments.empty() && StackRestorePoints.empty())
    return false; // Nothing to do in this function.

  if (!StaticAllocas.empty() || !DynamicAllocas.empty() ||
      !ByValArguments.empty())
    ++NumUnsafeStackFunctions; // This function has the unsafe stack.

  if (!StackRestorePoints.empty())
    ++NumUnsafeStackRestorePointsFunctions;

  // All prologue code is emitted at the top of the entry block.
  IRBuilder<> IRB(&F.front(), F.begin()->getFirstInsertionPt());
  // Calls must always have a debug location, or else inlining breaks. So
  // we explicitly set an artificial debug location here.
  if (DISubprogram *SP = F.getSubprogram())
    IRB.SetCurrentDebugLocation(
        DILocation::get(Context&: SP->getContext(), Line: SP->getScopeLine(), Column: 0, Scope: SP));
  if (SafeStackUsePointerAddress) {
    // Forced runtime call: __safestack_pointer_address.
    RTLIB::LibcallImpl SafestackPointerAddressImpl =
        Libcalls.getLibcallImpl(Call: RTLIB::SAFESTACK_POINTER_ADDRESS);
    if (SafestackPointerAddressImpl == RTLIB::Unsupported) {
      F.getContext().emitError(
          ErrorStr: "no libcall available for safestack pointer address");
      return false;
    }

    StringRef SafestackPointerAddressName =
        RTLIB::RuntimeLibcallsInfo::getLibcallImplName(
            CallImpl: SafestackPointerAddressImpl);

    FunctionCallee Fn = F.getParent()->getOrInsertFunction(
        Name: SafestackPointerAddressName, RetTy: IRB.getPtrTy(AddrSpace: 0));
    UnsafeStackPtr = IRB.CreateCall(Callee: Fn);
  } else {
    // Let the target pick the (possibly faster) location.
    UnsafeStackPtr = TL.getSafeStackPointerLocation(IRB, Libcalls);
  }

  // Load the current stack pointer (we'll also use it as a base pointer).
  // FIXME: use a dedicated register for it ?
  Instruction *BasePointer =
      IRB.CreateLoad(Ty: StackPtrTy, Ptr: UnsafeStackPtr, isVolatile: false, Name: "unsafe_stack_ptr");
  assert(BasePointer->getType() == StackPtrTy);

  AllocaInst *StackGuardSlot = nullptr;
  // FIXME: implement weaker forms of stack protector.
  if (F.hasFnAttribute(Kind: Attribute::StackProtect) ||
      F.hasFnAttribute(Kind: Attribute::StackProtectStrong) ||
      F.hasFnAttribute(Kind: Attribute::StackProtectReq)) {
    Value *StackGuard = getStackGuard(IRB, F);
    StackGuardSlot = IRB.CreateAlloca(Ty: StackPtrTy, ArraySize: nullptr);
    IRB.CreateStore(Val: StackGuard, Ptr: StackGuardSlot);

    for (Instruction *RI : Returns) {
      IRBuilder<> IRBRet(RI);
      checkStackGuard(IRB&: IRBRet, F, RI&: *RI, StackGuardSlot, StackGuard);
    }
  }

  // The top of the unsafe stack after all unsafe static allocas are
  // allocated.
  Value *StaticTop = moveStaticAllocasToUnsafeStack(
      IRB, F, StaticAllocas, ByValArguments, BasePointer, StackGuardSlot);

  // Safe stack object that stores the current unsafe stack top. It is updated
  // as unsafe dynamic (non-constant-sized) allocas are allocated and freed.
  // This is only needed if we need to restore stack pointer after longjmp
  // or exceptions, and we have dynamic allocations.
  // FIXME: a better alternative might be to store the unsafe stack pointer
  // before setjmp / invoke instructions.
  AllocaInst *DynamicTop = createStackRestorePoints(
      IRB, F, StackRestorePoints, StaticTop, NeedDynamicTop: !DynamicAllocas.empty());

  // Handle dynamic allocas.
  moveDynamicAllocasToUnsafeStack(F, UnsafeStackPtr, DynamicTop,
                                  DynamicAllocas);

  // Restore the unsafe stack pointer before each return.
  for (Instruction *RI : Returns) {
    IRB.SetInsertPoint(RI);
    IRB.CreateStore(Val: BasePointer, Ptr: UnsafeStackPtr);
  }

  TryInlinePointerAddress();

  LLVM_DEBUG(dbgs() << "[SafeStack] safestack applied\n");
  return true;
}
858
859class SafeStackLegacyPass : public FunctionPass {
860 const TargetMachine *TM = nullptr;
861
862public:
863 static char ID; // Pass identification, replacement for typeid..
864
865 SafeStackLegacyPass() : FunctionPass(ID) {}
866
867 void getAnalysisUsage(AnalysisUsage &AU) const override {
868 AU.addRequired<LibcallLoweringInfoWrapper>();
869 AU.addRequired<TargetPassConfig>();
870 AU.addRequired<TargetLibraryInfoWrapperPass>();
871 AU.addRequired<AssumptionCacheTracker>();
872 AU.addPreserved<DominatorTreeWrapperPass>();
873 }
874
875 bool runOnFunction(Function &F) override {
876 LLVM_DEBUG(dbgs() << "[SafeStack] Function: " << F.getName() << "\n");
877
878 if (!F.hasFnAttribute(Kind: Attribute::SafeStack)) {
879 LLVM_DEBUG(dbgs() << "[SafeStack] safestack is not requested"
880 " for this function\n");
881 return false;
882 }
883
884 if (F.isDeclaration()) {
885 LLVM_DEBUG(dbgs() << "[SafeStack] function definition"
886 " is not available\n");
887 return false;
888 }
889
890 TM = &getAnalysis<TargetPassConfig>().getTM<TargetMachine>();
891 const TargetSubtargetInfo *Subtarget = TM->getSubtargetImpl(F);
892 auto *TL = Subtarget->getTargetLowering();
893 if (!TL)
894 report_fatal_error(reason: "TargetLowering instance is required");
895
896 const LibcallLoweringInfo &Libcalls =
897 getAnalysis<LibcallLoweringInfoWrapper>().getLibcallLowering(
898 M: *F.getParent(), Subtarget: *Subtarget);
899
900 auto *DL = &F.getDataLayout();
901 auto &TLI = getAnalysis<TargetLibraryInfoWrapperPass>().getTLI(F);
902 auto &ACT = getAnalysis<AssumptionCacheTracker>().getAssumptionCache(F);
903
904 // Compute DT and LI only for functions that have the attribute.
905 // This is only useful because the legacy pass manager doesn't let us
906 // compute analyzes lazily.
907
908 DominatorTree *DT;
909 bool ShouldPreserveDominatorTree;
910 std::optional<DominatorTree> LazilyComputedDomTree;
911
912 // Do we already have a DominatorTree available from the previous pass?
913 // Note that we should *NOT* require it, to avoid the case where we end up
914 // not needing it, but the legacy PM would have computed it for us anyways.
915 if (auto *DTWP = getAnalysisIfAvailable<DominatorTreeWrapperPass>()) {
916 DT = &DTWP->getDomTree();
917 ShouldPreserveDominatorTree = true;
918 } else {
919 // Otherwise, we need to compute it.
920 LazilyComputedDomTree.emplace(args&: F);
921 DT = &*LazilyComputedDomTree;
922 ShouldPreserveDominatorTree = false;
923 }
924
925 // Likewise, lazily compute loop info.
926 LoopInfo LI(*DT);
927
928 DomTreeUpdater DTU(DT, DomTreeUpdater::UpdateStrategy::Lazy);
929
930 ScalarEvolution SE(F, TLI, ACT, *DT, LI);
931
932 return SafeStack(F, *TL, Libcalls, *DL,
933 ShouldPreserveDominatorTree ? &DTU : nullptr, SE)
934 .run();
935 }
936};
937
938} // end anonymous namespace
939
940PreservedAnalyses SafeStackPass::run(Function &F,
941 FunctionAnalysisManager &FAM) {
942 LLVM_DEBUG(dbgs() << "[SafeStack] Function: " << F.getName() << "\n");
943
944 if (!F.hasFnAttribute(Kind: Attribute::SafeStack)) {
945 LLVM_DEBUG(dbgs() << "[SafeStack] safestack is not requested"
946 " for this function\n");
947 return PreservedAnalyses::all();
948 }
949
950 if (F.isDeclaration()) {
951 LLVM_DEBUG(dbgs() << "[SafeStack] function definition"
952 " is not available\n");
953 return PreservedAnalyses::all();
954 }
955
956 const TargetSubtargetInfo *Subtarget = TM->getSubtargetImpl(F);
957 auto *TL = Subtarget->getTargetLowering();
958
959 auto &DL = F.getDataLayout();
960
961 // preserve DominatorTree
962 auto &DT = FAM.getResult<DominatorTreeAnalysis>(IR&: F);
963 auto &SE = FAM.getResult<ScalarEvolutionAnalysis>(IR&: F);
964
965 auto &MAMProxy = FAM.getResult<ModuleAnalysisManagerFunctionProxy>(IR&: F);
966 const LibcallLoweringModuleAnalysisResult *LibcallLowering =
967 MAMProxy.getCachedResult<LibcallLoweringModuleAnalysis>(IR&: *F.getParent());
968
969 if (!LibcallLowering) {
970 F.getContext().emitError(ErrorStr: "'" + LibcallLoweringModuleAnalysis::name() +
971 "' analysis required");
972 return PreservedAnalyses::all();
973 }
974
975 const LibcallLoweringInfo &Libcalls =
976 LibcallLowering->getLibcallLowering(Subtarget: *Subtarget);
977
978 DomTreeUpdater DTU(DT, DomTreeUpdater::UpdateStrategy::Lazy);
979
980 bool Changed = SafeStack(F, *TL, Libcalls, DL, &DTU, SE).run();
981
982 if (!Changed)
983 return PreservedAnalyses::all();
984 PreservedAnalyses PA;
985 PA.preserve<DominatorTreeAnalysis>();
986 return PA;
987}
988
// Pass identification token used by the legacy pass manager.
char SafeStackLegacyPass::ID = 0;

// Register the legacy pass and declare its analysis dependencies so the
// legacy pass manager schedules them before this pass runs.
INITIALIZE_PASS_BEGIN(SafeStackLegacyPass, DEBUG_TYPE,
                      "Safe Stack instrumentation pass", false, false)
INITIALIZE_PASS_DEPENDENCY(LibcallLoweringInfoWrapper)
INITIALIZE_PASS_DEPENDENCY(TargetPassConfig)
INITIALIZE_PASS_DEPENDENCY(DominatorTreeWrapperPass)
INITIALIZE_PASS_END(SafeStackLegacyPass, DEBUG_TYPE,
                    "Safe Stack instrumentation pass", false, false)

// Factory used by target pass pipelines to add the legacy SafeStack pass.
FunctionPass *llvm::createSafeStackPass() { return new SafeStackLegacyPass(); }
1000