//=- AArch64MachineFunctionInfo.cpp - AArch64 Machine Function Info ---------=//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
///
/// \file
/// This file implements AArch64-specific per-machine-function
/// information.
///
//===----------------------------------------------------------------------===//

#include "AArch64MachineFunctionInfo.h"
#include "AArch64InstrInfo.h"
#include "AArch64Subtarget.h"
#include "llvm/IR/Constants.h"
#include "llvm/IR/Metadata.h"
#include "llvm/IR/Module.h"
#include "llvm/MC/MCAsmInfo.h"

using namespace llvm;
25
26yaml::AArch64FunctionInfo::AArch64FunctionInfo(
27 const llvm::AArch64FunctionInfo &MFI)
28 : HasRedZone(MFI.hasRedZone()) {}
29
30void yaml::AArch64FunctionInfo::mappingImpl(yaml::IO &YamlIO) {
31 MappingTraits<AArch64FunctionInfo>::mapping(YamlIO, MFI&: *this);
32}
33
34void AArch64FunctionInfo::initializeBaseYamlFields(
35 const yaml::AArch64FunctionInfo &YamlMFI) {
36 if (YamlMFI.HasRedZone)
37 HasRedZone = YamlMFI.HasRedZone;
38}
39
40static std::pair<bool, bool> GetSignReturnAddress(const Function &F) {
41 if (F.hasFnAttribute(Kind: "ptrauth-returns"))
42 return {true, false}; // non-leaf
43 // The function should be signed in the following situations:
44 // - sign-return-address=all
45 // - sign-return-address=non-leaf and the functions spills the LR
46 if (!F.hasFnAttribute(Kind: "sign-return-address"))
47 return {false, false};
48
49 StringRef Scope = F.getFnAttribute(Kind: "sign-return-address").getValueAsString();
50 if (Scope == "none")
51 return {false, false};
52
53 if (Scope == "all")
54 return {true, true};
55
56 assert(Scope == "non-leaf");
57 return {true, false};
58}
59
60static bool ShouldSignWithBKey(const Function &F, const AArch64Subtarget &STI) {
61 if (F.hasFnAttribute(Kind: "ptrauth-returns"))
62 return true;
63 if (!F.hasFnAttribute(Kind: "sign-return-address-key")) {
64 if (STI.getTargetTriple().isOSWindows())
65 return true;
66 return false;
67 }
68
69 const StringRef Key =
70 F.getFnAttribute(Kind: "sign-return-address-key").getValueAsString();
71 assert(Key == "a_key" || Key == "b_key");
72 return Key == "b_key";
73}
74
75static bool hasELFSignedGOTHelper(const Function &F,
76 const AArch64Subtarget *STI) {
77 if (!STI->getTargetTriple().isOSBinFormatELF())
78 return false;
79 const Module *M = F.getParent();
80 const auto *Flag = mdconst::extract_or_null<ConstantInt>(
81 MD: M->getModuleFlag(Key: "ptrauth-elf-got"));
82 if (Flag && Flag->getZExtValue() == 1)
83 return true;
84 return false;
85}
86
87AArch64FunctionInfo::AArch64FunctionInfo(const Function &F,
88 const AArch64Subtarget *STI) {
89 // If we already know that the function doesn't have a redzone, set
90 // HasRedZone here.
91 if (F.hasFnAttribute(Kind: Attribute::NoRedZone))
92 HasRedZone = false;
93 std::tie(args&: SignReturnAddress, args&: SignReturnAddressAll) = GetSignReturnAddress(F);
94 SignWithBKey = ShouldSignWithBKey(F, STI: *STI);
95 HasELFSignedGOT = hasELFSignedGOTHelper(F, STI);
96 // TODO: skip functions that have no instrumented allocas for optimization
97 IsMTETagged = F.hasFnAttribute(Kind: Attribute::SanitizeMemTag);
98
99 // BTI/PAuthLR are set on the function attribute.
100 BranchTargetEnforcement = F.hasFnAttribute(Kind: "branch-target-enforcement");
101 BranchProtectionPAuthLR = F.hasFnAttribute(Kind: "branch-protection-pauth-lr");
102
103 // Parse the SME function attributes.
104 SMEFnAttrs = SMEAttrs(F);
105
106 // The default stack probe size is 4096 if the function has no
107 // stack-probe-size attribute. This is a safe default because it is the
108 // smallest possible guard page size.
109 uint64_t ProbeSize = 4096;
110 if (F.hasFnAttribute(Kind: "stack-probe-size"))
111 ProbeSize = F.getFnAttributeAsParsedInteger(Kind: "stack-probe-size");
112 else if (const auto *PS = mdconst::extract_or_null<ConstantInt>(
113 MD: F.getParent()->getModuleFlag(Key: "stack-probe-size")))
114 ProbeSize = PS->getZExtValue();
115 assert(int64_t(ProbeSize) > 0 && "Invalid stack probe size");
116
117 if (STI->isTargetWindows()) {
118 if (!F.hasFnAttribute(Kind: "no-stack-arg-probe"))
119 StackProbeSize = ProbeSize;
120 } else {
121 // Round down to the stack alignment.
122 uint64_t StackAlign =
123 STI->getFrameLowering()->getTransientStackAlign().value();
124 ProbeSize = std::max(a: StackAlign, b: ProbeSize & ~(StackAlign - 1U));
125 StringRef ProbeKind;
126 if (F.hasFnAttribute(Kind: "probe-stack"))
127 ProbeKind = F.getFnAttribute(Kind: "probe-stack").getValueAsString();
128 else if (const auto *PS = dyn_cast_or_null<MDString>(
129 Val: F.getParent()->getModuleFlag(Key: "probe-stack")))
130 ProbeKind = PS->getString();
131 if (ProbeKind.size()) {
132 if (ProbeKind != "inline-asm")
133 report_fatal_error(reason: "Unsupported stack probing method");
134 StackProbeSize = ProbeSize;
135 }
136 }
137}
138
139MachineFunctionInfo *AArch64FunctionInfo::clone(
140 BumpPtrAllocator &Allocator, MachineFunction &DestMF,
141 const DenseMap<MachineBasicBlock *, MachineBasicBlock *> &Src2DstMBB)
142 const {
143 return DestMF.cloneInfo<AArch64FunctionInfo>(Old: *this);
144}
145
146bool AArch64FunctionInfo::shouldSignReturnAddress(bool SpillsLR) const {
147 if (!SignReturnAddress)
148 return false;
149 if (SignReturnAddressAll)
150 return true;
151 return SpillsLR;
152}
153
154static bool isLRSpilled(const MachineFunction &MF) {
155 return llvm::any_of(
156 Range: MF.getFrameInfo().getCalleeSavedInfo(),
157 P: [](const auto &Info) { return Info.getReg() == AArch64::LR; });
158}
159
160bool AArch64FunctionInfo::shouldSignReturnAddress(
161 const MachineFunction &MF) const {
162 return shouldSignReturnAddress(SpillsLR: isLRSpilled(MF));
163}
164
165bool AArch64FunctionInfo::needsShadowCallStackPrologueEpilogue(
166 MachineFunction &MF) const {
167 if (!(isLRSpilled(MF) &&
168 MF.getFunction().hasFnAttribute(Kind: Attribute::ShadowCallStack)))
169 return false;
170
171 if (!MF.getSubtarget<AArch64Subtarget>().isXRegisterReserved(i: 18))
172 report_fatal_error(reason: "Must reserve x18 to use shadow call stack");
173
174 return true;
175}
176
177bool AArch64FunctionInfo::needsDwarfUnwindInfo(
178 const MachineFunction &MF) const {
179 if (!NeedsDwarfUnwindInfo)
180 NeedsDwarfUnwindInfo = MF.needsFrameMoves() &&
181 !MF.getTarget().getMCAsmInfo()->usesWindowsCFI();
182
183 return *NeedsDwarfUnwindInfo;
184}
185
186bool AArch64FunctionInfo::needsAsyncDwarfUnwindInfo(
187 const MachineFunction &MF) const {
188 if (!NeedsAsyncDwarfUnwindInfo) {
189 const Function &F = MF.getFunction();
190 const AArch64FunctionInfo *AFI = MF.getInfo<AArch64FunctionInfo>();
191 // The check got "minsize" is because epilogue unwind info is not emitted
192 // (yet) for homogeneous epilogues, outlined functions, and functions
193 // outlined from.
194 NeedsAsyncDwarfUnwindInfo =
195 needsDwarfUnwindInfo(MF) &&
196 ((F.getUWTableKind() == UWTableKind::Async && !F.hasMinSize()) ||
197 AFI->hasStreamingModeChanges());
198 }
199 return *NeedsAsyncDwarfUnwindInfo;
200}
201