//===-- MipsNaClELFStreamer.cpp - ELF Object Output for Mips NaCl ---------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This file implements MCELFStreamer for Mips NaCl. It emits .o object files
// as required by NaCl's SFI sandbox. It inserts address-masking instructions
// before dangerous control-flow and memory access instructions. It inserts
// address-masking instructions after instructions that change the stack
// pointer. It ensures that the mask and the dangerous instruction are always
// emitted in the same bundle. It aligns call + branch delay to the bundle end,
// so that the return address is always aligned to the start of the next bundle.
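//
// For example, an indirect jump through $t9 is sandboxed roughly as the
// following bundle-locked pair:
//
//   and $t9, $t9, $t6    # inserted mask of the branch target
//   jr  $t9              # original indirect jump
//
// so the mask and the jump can never be split across a bundle boundary.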
//
//===----------------------------------------------------------------------===//

#include "Mips.h"
#include "MipsELFStreamer.h"
#include "MipsMCNaCl.h"
#include "llvm/MC/MCAsmBackend.h"
#include "llvm/MC/MCAssembler.h"
#include "llvm/MC/MCCodeEmitter.h"
#include "llvm/MC/MCELFStreamer.h"
#include "llvm/MC/MCInst.h"
#include "llvm/MC/MCObjectWriter.h"
#include "llvm/Support/ErrorHandling.h"
#include <cassert>

using namespace llvm;

#define DEBUG_TYPE "mips-mc-nacl"

namespace {

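// Mask registers used by the NaCl sandbox: T6 masks indirect-branch targets,
// T7 masks load/store addresses and the stack pointer after it is modified.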
const unsigned IndirectBranchMaskReg = Mips::T6;
const unsigned LoadStoreStackMaskReg = Mips::T7;

/// Extend the generic MCELFStreamer class so that it can mask dangerous
/// instructions.
class MipsNaClELFStreamer : public MipsELFStreamer {
public:
  MipsNaClELFStreamer(MCContext &Context, std::unique_ptr<MCAsmBackend> TAB,
                      std::unique_ptr<MCObjectWriter> OW,
                      std::unique_ptr<MCCodeEmitter> Emitter)
      : MipsELFStreamer(Context, std::move(TAB), std::move(OW),
                        std::move(Emitter)) {}

  ~MipsNaClELFStreamer() override = default;

private:
  // Whether we started the sandboxing sequence for calls. Calls are bundled
  // with branch delays and aligned to the bundle end.
  bool PendingCall = false;

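  // Returns true if MI is a register-indirect jump: a JR, or a JALR whose
  // link register is $zero (r6 has no JR and uses JALR for this purpose).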
  bool isIndirectJump(const MCInst &MI) {
    if (MI.getOpcode() == Mips::JALR) {
      // MIPS32r6/MIPS64r6 doesn't have a JR instruction and uses JALR instead.
      // JALR is an indirect branch if the link register is $0.
      assert(MI.getOperand(0).isReg());
      return MI.getOperand(0).getReg() == Mips::ZERO;
    }
    return MI.getOpcode() == Mips::JR;
  }

  bool isStackPointerFirstOperand(const MCInst &MI) {
    return (MI.getNumOperands() > 0 && MI.getOperand(0).isReg()
            && MI.getOperand(0).getReg() == Mips::SP);
  }

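  // Returns true if MI is a call; *IsIndirectCall is set when the call target
  // comes from a register (JALR with a non-$zero link register).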
  bool isCall(const MCInst &MI, bool *IsIndirectCall) {
    unsigned Opcode = MI.getOpcode();

    *IsIndirectCall = false;

    switch (Opcode) {
    default:
      return false;

    case Mips::JAL:
    case Mips::BAL:
    case Mips::BAL_BR:
    case Mips::BLTZAL:
    case Mips::BGEZAL:
      return true;

    case Mips::JALR:
      // JALR is only a call if the link register is not $0. Otherwise it's an
      // indirect branch.
      assert(MI.getOperand(0).isReg());
      if (MI.getOperand(0).getReg() == Mips::ZERO)
        return false;

      *IsIndirectCall = true;
      return true;
    }
  }

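  // Emit the masking instruction "and AddrReg, AddrReg, MaskReg" used to
  // sandbox an address held in AddrReg.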
  void emitMask(unsigned AddrReg, unsigned MaskReg,
                const MCSubtargetInfo &STI) {
    MCInst MaskInst;
    MaskInst.setOpcode(Mips::AND);
    MaskInst.addOperand(MCOperand::createReg(AddrReg));
    MaskInst.addOperand(MCOperand::createReg(AddrReg));
    MaskInst.addOperand(MCOperand::createReg(MaskReg));
    MipsELFStreamer::emitInstruction(MaskInst, STI);
  }

  // Sandbox an indirect branch or return instruction by inserting a mask
  // operation before it.
  void sandboxIndirectJump(const MCInst &MI, const MCSubtargetInfo &STI) {
    unsigned AddrReg = MI.getOperand(0).getReg();

    emitBundleLock(false);
    emitMask(AddrReg, IndirectBranchMaskReg, STI);
    MipsELFStreamer::emitInstruction(MI, STI);
    emitBundleUnlock();
  }

  // Sandbox a memory access or SP change. Insert the mask operation before
  // and/or after the instruction.
  void sandboxLoadStoreStackChange(const MCInst &MI, unsigned AddrIdx,
                                   const MCSubtargetInfo &STI, bool MaskBefore,
                                   bool MaskAfter) {
    emitBundleLock(false);
    if (MaskBefore) {
      // Sandbox memory access.
      unsigned BaseReg = MI.getOperand(AddrIdx).getReg();
      emitMask(BaseReg, LoadStoreStackMaskReg, STI);
    }
    MipsELFStreamer::emitInstruction(MI, STI);
    if (MaskAfter) {
      // Sandbox SP change.
      unsigned SPReg = MI.getOperand(0).getReg();
      assert((Mips::SP == SPReg) && "Unexpected stack-pointer register.");
      emitMask(SPReg, LoadStoreStackMaskReg, STI);
    }
    emitBundleUnlock();
  }

public:
  /// This function is the one used to emit instruction data into the ELF
  /// streamer. We override it to mask dangerous instructions.
  void emitInstruction(const MCInst &Inst,
                       const MCSubtargetInfo &STI) override {
    // Sandbox indirect jumps.
    if (isIndirectJump(Inst)) {
      if (PendingCall)
        report_fatal_error("Dangerous instruction in branch delay slot!");
      sandboxIndirectJump(Inst, STI);
      return;
    }

    // Sandbox loads, stores and SP changes.
    unsigned AddrIdx = 0;
    bool IsStore = false;
    bool IsMemAccess = isBasePlusOffsetMemoryAccess(Inst.getOpcode(), &AddrIdx,
                                                    &IsStore);
    bool IsSPFirstOperand = isStackPointerFirstOperand(Inst);
    if (IsMemAccess || IsSPFirstOperand) {
      bool MaskBefore = (IsMemAccess
                         && baseRegNeedsLoadStoreMask(Inst.getOperand(AddrIdx)
                                                          .getReg()));
      bool MaskAfter = IsSPFirstOperand && !IsStore;
      if (MaskBefore || MaskAfter) {
        if (PendingCall)
          report_fatal_error("Dangerous instruction in branch delay slot!");
        sandboxLoadStoreStackChange(Inst, AddrIdx, STI, MaskBefore, MaskAfter);
        return;
      }
      // fallthrough
    }

    // Sandbox calls by aligning call and branch delay to the bundle end.
    // For indirect calls, emit the mask before the call.
    bool IsIndirectCall;
    if (isCall(Inst, &IsIndirectCall)) {
      if (PendingCall)
        report_fatal_error("Dangerous instruction in branch delay slot!");

      // Start the sandboxing sequence by emitting the call.
      emitBundleLock(true);
      if (IsIndirectCall) {
        unsigned TargetReg = Inst.getOperand(1).getReg();
        emitMask(TargetReg, IndirectBranchMaskReg, STI);
      }
      MipsELFStreamer::emitInstruction(Inst, STI);
      PendingCall = true;
      return;
    }
    if (PendingCall) {
      // Finish the sandboxing sequence by emitting the branch delay.
      MipsELFStreamer::emitInstruction(Inst, STI);
      emitBundleUnlock();
      PendingCall = false;
      return;
    }

    // None of the sandboxing applies; just emit the instruction.
    MipsELFStreamer::emitInstruction(Inst, STI);
  }
};

} // end anonymous namespace

namespace llvm {

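// Returns true if Opcode names a base-plus-offset load or store. On success,
// *AddrIdx is set to the operand index of the base address register and
// *IsStore (if non-null) reflects whether the instruction is a store.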
bool isBasePlusOffsetMemoryAccess(unsigned Opcode, unsigned *AddrIdx,
                                  bool *IsStore) {
  if (IsStore)
    *IsStore = false;

  switch (Opcode) {
  default:
    return false;

  // Load instructions with base address register in position 1.
  case Mips::LB:
  case Mips::LBu:
  case Mips::LH:
  case Mips::LHu:
  case Mips::LW:
  case Mips::LWC1:
  case Mips::LDC1:
  case Mips::LL:
  case Mips::LL_R6:
  case Mips::LWL:
  case Mips::LWR:
    *AddrIdx = 1;
    return true;

  // Store instructions with base address register in position 1.
  case Mips::SB:
  case Mips::SH:
  case Mips::SW:
  case Mips::SWC1:
  case Mips::SDC1:
  case Mips::SWL:
  case Mips::SWR:
    *AddrIdx = 1;
    if (IsStore)
      *IsStore = true;
    return true;

  // Store instructions with base address register in position 2.
  case Mips::SC:
  case Mips::SC_R6:
    *AddrIdx = 2;
    if (IsStore)
      *IsStore = true;
    return true;
  }
}

bool baseRegNeedsLoadStoreMask(unsigned Reg) {
  // The contents of SP and the thread pointer register do not require masking.
  return Reg != Mips::SP && Reg != Mips::T8;
}

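// Create a MipsNaClELFStreamer and configure it with the bundle alignment
// required by the NaCl sandbox.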
MCELFStreamer *
createMipsNaClELFStreamer(MCContext &Context, std::unique_ptr<MCAsmBackend> TAB,
                          std::unique_ptr<MCObjectWriter> OW,
                          std::unique_ptr<MCCodeEmitter> Emitter) {
  MipsNaClELFStreamer *S = new MipsNaClELFStreamer(
      Context, std::move(TAB), std::move(OW), std::move(Emitter));

  // Set bundle-alignment as required by the NaCl ABI for the target.
  S->emitBundleAlignMode(MIPS_NACL_BUNDLE_ALIGN);

  return S;
}

} // end namespace llvm