//===-- Thumb1InstrInfo.cpp - Thumb-1 Instruction Information -------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This file contains the Thumb-1 implementation of the TargetInstrInfo class.
//
//===----------------------------------------------------------------------===//

#include "Thumb1InstrInfo.h"
#include "ARMSubtarget.h"
#include "llvm/ADT/BitVector.h"
#include "llvm/CodeGen/LiveRegUnits.h"
#include "llvm/CodeGen/MachineFrameInfo.h"
#include "llvm/CodeGen/MachineInstrBuilder.h"
#include "llvm/CodeGen/MachineMemOperand.h"
#include "llvm/IR/Module.h"
#include "llvm/MC/MCInst.h"
#include "llvm/MC/MCInstBuilder.h"

using namespace llvm;

Thumb1InstrInfo::Thumb1InstrInfo(const ARMSubtarget &STI)
    : ARMBaseInstrInfo(STI, RI), RI(STI) {}

/// Return the noop instruction to use for a noop.
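/// Thumb-1 predates the architectural NOP hint, so the conventional
/// "mov r8, r8" encoding is emitted instead; it has no effect on the
/// architectural state.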
MCInst Thumb1InstrInfo::getNop() const {
  return MCInstBuilder(ARM::tMOVr)
      .addReg(ARM::R8)
      .addReg(ARM::R8)
      .addImm(ARMCC::AL)
      .addReg(0);
}

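// Thumb-1 has no indexed load/store forms to convert, so there is never an
// unindexed counterpart to return; 0 signals "no mapping" to the caller.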
unsigned Thumb1InstrInfo::getUnindexedOpcode(unsigned Opc) const {
  return 0;
}

void Thumb1InstrInfo::copyPhysReg(MachineBasicBlock &MBB,
                                  MachineBasicBlock::iterator I,
                                  const DebugLoc &DL, Register DestReg,
                                  Register SrcReg, bool KillSrc,
                                  bool RenamableDest, bool RenamableSrc) const {
  // Need to check the arch.
  MachineFunction &MF = *MBB.getParent();
  const ARMSubtarget &st = MF.getSubtarget<ARMSubtarget>();

  assert(ARM::GPRRegClass.contains(DestReg, SrcReg) &&
         "Thumb1 can only copy GPR registers");

  if (st.hasV6Ops() || ARM::hGPRRegClass.contains(SrcReg) ||
      !ARM::tGPRRegClass.contains(DestReg))
    BuildMI(MBB, I, DL, get(ARM::tMOVr), DestReg)
        .addReg(SrcReg, getKillRegState(KillSrc))
        .add(predOps(ARMCC::AL));
  else {
    const TargetRegisterInfo *RegInfo = st.getRegisterInfo();
    LiveRegUnits UsedRegs(*RegInfo);
    UsedRegs.addLiveOuts(MBB);

    auto InstUpToI = MBB.end();
    while (InstUpToI != I)
      // The pre-decrement is on purpose here.
      // We want to have the liveness right before I.
      UsedRegs.stepBackward(*--InstUpToI);

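    // "movs" (tMOVSr) is always encodable between low registers but updates
    // the flags, so it is only usable when CPSR is free at this point.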
    if (UsedRegs.available(ARM::CPSR)) {
      BuildMI(MBB, I, DL, get(ARM::tMOVSr), DestReg)
          .addReg(SrcReg, getKillRegState(KillSrc))
          ->addRegisterDead(ARM::CPSR, RegInfo);
      return;
    }

    // Use high register to move source to destination
    // if movs is not an option.
    BitVector Allocatable = RegInfo->getAllocatableSet(
        MF, RegInfo->getRegClass(ARM::hGPRRegClassID));

    Register TmpReg = ARM::NoRegister;
    // Prefer R12 as it is known to not be preserved anyway
    if (UsedRegs.available(ARM::R12) && Allocatable.test(ARM::R12)) {
      TmpReg = ARM::R12;
    } else {
      for (Register Reg : Allocatable.set_bits()) {
        if (UsedRegs.available(Reg)) {
          TmpReg = Reg;
          break;
        }
      }
    }

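    // A free high register was found: route the copy through it with two
    // plain "mov" instructions, which are valid for hi/lo operands even
    // before ARMv6.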
    if (TmpReg) {
      BuildMI(MBB, I, DL, get(ARM::tMOVr), TmpReg)
          .addReg(SrcReg, getKillRegState(KillSrc))
          .add(predOps(ARMCC::AL));
      BuildMI(MBB, I, DL, get(ARM::tMOVr), DestReg)
          .addReg(TmpReg, getKillRegState(true))
          .add(predOps(ARMCC::AL));
      return;
    }

    // 'MOV lo, lo' is unpredictable on < v6, so use the stack to do it
    BuildMI(MBB, I, DL, get(ARM::tPUSH))
        .add(predOps(ARMCC::AL))
        .addReg(SrcReg, getKillRegState(KillSrc));
    BuildMI(MBB, I, DL, get(ARM::tPOP))
        .add(predOps(ARMCC::AL))
        .addReg(DestReg, getDefRegState(true));
  }
}

void Thumb1InstrInfo::storeRegToStackSlot(MachineBasicBlock &MBB,
                                          MachineBasicBlock::iterator I,
                                          Register SrcReg, bool isKill, int FI,
                                          const TargetRegisterClass *RC,
                                          Register VReg,
                                          MachineInstr::MIFlag Flags) const {
  assert((RC == &ARM::tGPRRegClass ||
          (SrcReg.isPhysical() && isARMLowRegister(SrcReg))) &&
         "Unknown regclass!");

  if (RC == &ARM::tGPRRegClass ||
      (SrcReg.isPhysical() && isARMLowRegister(SrcReg))) {
    DebugLoc DL;
    if (I != MBB.end()) DL = I->getDebugLoc();

    MachineFunction &MF = *MBB.getParent();
    MachineFrameInfo &MFI = MF.getFrameInfo();
    MachineMemOperand *MMO = MF.getMachineMemOperand(
        MachinePointerInfo::getFixedStack(MF, FI), MachineMemOperand::MOStore,
        MFI.getObjectSize(FI), MFI.getObjectAlign(FI));
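    // tSTRspi is the SP-relative store with an immediate offset; the frame
    // index operand and the placeholder immediate below are rewritten to a
    // real SP offset during frame index elimination.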
    BuildMI(MBB, I, DL, get(ARM::tSTRspi))
        .addReg(SrcReg, getKillRegState(isKill))
        .addFrameIndex(FI)
        .addImm(0)
        .addMemOperand(MMO)
        .add(predOps(ARMCC::AL));
  }
}

void Thumb1InstrInfo::loadRegFromStackSlot(MachineBasicBlock &MBB,
                                           MachineBasicBlock::iterator I,
                                           Register DestReg, int FI,
                                           const TargetRegisterClass *RC,
                                           Register VReg, unsigned SubReg,
                                           MachineInstr::MIFlag Flags) const {
  assert((RC->hasSuperClassEq(&ARM::tGPRRegClass) ||
          (DestReg.isPhysical() && isARMLowRegister(DestReg))) &&
         "Unknown regclass!");

  if (RC->hasSuperClassEq(&ARM::tGPRRegClass) ||
      (DestReg.isPhysical() && isARMLowRegister(DestReg))) {
    DebugLoc DL;
    if (I != MBB.end()) DL = I->getDebugLoc();

    MachineFunction &MF = *MBB.getParent();
    MachineFrameInfo &MFI = MF.getFrameInfo();
    MachineMemOperand *MMO = MF.getMachineMemOperand(
        MachinePointerInfo::getFixedStack(MF, FI), MachineMemOperand::MOLoad,
        MFI.getObjectSize(FI), MFI.getObjectAlign(FI));
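    // Mirror of the store case: tLDRspi reloads from the SP-relative slot
    // once the frame index has been resolved.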
    BuildMI(MBB, I, DL, get(ARM::tLDRspi), DestReg)
        .addFrameIndex(FI)
        .addImm(0)
        .addMemOperand(MMO)
        .add(predOps(ARMCC::AL));
  }
}

void Thumb1InstrInfo::expandLoadStackGuard(
    MachineBasicBlock::iterator MI) const {
  MachineFunction &MF = *MI->getParent()->getParent();
  const ARMSubtarget &ST = MF.getSubtarget<ARMSubtarget>();
  const auto *GV = cast<GlobalValue>((*MI->memoperands_begin())->getValue());

  assert(MF.getFunction().getParent()->getStackProtectorGuard() != "tls" &&
         "TLS stack protector not supported for Thumb1 targets");

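  // Pick how the guard variable's address is materialized: a PC-relative
  // literal load when the global is not DSO-local, an inline immediate
  // sequence when execute-only code generation forbids literal pools, and an
  // absolute literal load otherwise.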
  unsigned Instr;
  if (!GV->isDSOLocal())
    Instr = ARM::tLDRLIT_ga_pcrel;
  else if (ST.genExecuteOnly() && ST.hasV8MBaselineOps())
    Instr = ARM::t2MOVi32imm;
  else if (ST.genExecuteOnly())
    Instr = ARM::tMOVi32imm;
  else
    Instr = ARM::tLDRLIT_ga_abs;
  expandLoadStackGuardBase(MI, Instr, ARM::tLDRi);
}

bool Thumb1InstrInfo::canCopyGluedNodeDuringSchedule(SDNode *N) const {
  // In Thumb1 the scheduler may need to schedule a cross-copy between GPRs
  // and CPSR, but that is not always possible, so allow the scheduler to
  // clone tADCS and tSBCS even if they have glue.
  // FIXME: Actually implement the cross-copy where it is possible (post v6),
  // because these copies entail more spilling.
  unsigned Opcode = N->getMachineOpcode();
  if (Opcode == ARM::tADCS || Opcode == ARM::tSBCS)
    return true;

  return false;
}