//===-- Thumb1InstrInfo.cpp - Thumb-1 Instruction Information -------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This file contains the Thumb-1 implementation of the TargetInstrInfo class.
//
//===----------------------------------------------------------------------===//

#include "Thumb1InstrInfo.h"
#include "ARMSubtarget.h"
#include "llvm/ADT/BitVector.h"
#include "llvm/CodeGen/LiveRegUnits.h"
#include "llvm/CodeGen/MachineFrameInfo.h"
#include "llvm/CodeGen/MachineInstrBuilder.h"
#include "llvm/CodeGen/MachineMemOperand.h"
#include "llvm/IR/Module.h"
#include "llvm/MC/MCInst.h"
#include "llvm/MC/MCInstBuilder.h"

using namespace llvm;

Thumb1InstrInfo::Thumb1InstrInfo(const ARMSubtarget &STI)
    : ARMBaseInstrInfo(STI) {}

/// Return the noop instruction to use for a noop: 'mov r8, r8'.
MCInst Thumb1InstrInfo::getNop() const {
  return MCInstBuilder(ARM::tMOVr)
      .addReg(ARM::R8)
      .addReg(ARM::R8)
      .addImm(ARMCC::AL)
      .addReg(0);
}

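// Thumb-1 loads and stores have no pre- or post-indexed (writeback) forms, so
// there is never an unindexed equivalent to return.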
unsigned Thumb1InstrInfo::getUnindexedOpcode(unsigned Opc) const {
  return 0;
}

void Thumb1InstrInfo::copyPhysReg(MachineBasicBlock &MBB,
                                  MachineBasicBlock::iterator I,
                                  const DebugLoc &DL, Register DestReg,
                                  Register SrcReg, bool KillSrc,
                                  bool RenamableDest, bool RenamableSrc) const {
  // Need to check the arch.
  MachineFunction &MF = *MBB.getParent();
  const ARMSubtarget &st = MF.getSubtarget<ARMSubtarget>();

  assert(ARM::GPRRegClass.contains(DestReg, SrcReg) &&
         "Thumb1 can only copy GPR registers");

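  // A plain tMOVr is fine when the target has v6 ops or when a high register
  // is involved; only a low-to-low copy on pre-v6 needs the special handling
  // below.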
  if (st.hasV6Ops() || ARM::hGPRRegClass.contains(SrcReg) ||
      !ARM::tGPRRegClass.contains(DestReg))
    BuildMI(MBB, I, DL, get(ARM::tMOVr), DestReg)
        .addReg(SrcReg, getKillRegState(KillSrc))
        .add(predOps(ARMCC::AL));
  else {
    const TargetRegisterInfo *RegInfo = st.getRegisterInfo();
    LiveRegUnits UsedRegs(*RegInfo);
    UsedRegs.addLiveOuts(MBB);

    auto InstUpToI = MBB.end();
    while (InstUpToI != I)
      // The pre-decrement is on purpose here.
      // We want to have the liveness right before I.
      UsedRegs.stepBackward(*--InstUpToI);

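    // A low-to-low MOVS is available even before v6, but it writes CPSR, so
    // it can only be used when the flags are not live at this point.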
    if (UsedRegs.available(ARM::CPSR)) {
      BuildMI(MBB, I, DL, get(ARM::tMOVSr), DestReg)
          .addReg(SrcReg, getKillRegState(KillSrc))
          ->addRegisterDead(ARM::CPSR, RegInfo);
      return;
    }

    // Use a high register to move source to destination
    // if MOVS is not an option.
    BitVector Allocatable = RegInfo->getAllocatableSet(
        MF, RegInfo->getRegClass(ARM::hGPRRegClassID));

    Register TmpReg = ARM::NoRegister;
    // Prefer R12 as it is known not to be preserved anyway.
    if (UsedRegs.available(ARM::R12) && Allocatable.test(ARM::R12)) {
      TmpReg = ARM::R12;
    } else {
      for (Register Reg : Allocatable.set_bits()) {
        if (UsedRegs.available(Reg)) {
          TmpReg = Reg;
          break;
        }
      }
    }

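    // Bounce the value through the free high register; MOVs to and from high
    // registers are well-defined even before v6.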
    if (TmpReg) {
      BuildMI(MBB, I, DL, get(ARM::tMOVr), TmpReg)
          .addReg(SrcReg, getKillRegState(KillSrc))
          .add(predOps(ARMCC::AL));
      BuildMI(MBB, I, DL, get(ARM::tMOVr), DestReg)
          .addReg(TmpReg, getKillRegState(true))
          .add(predOps(ARMCC::AL));
      return;
    }

    // 'MOV lo, lo' is unpredictable on < v6, so use the stack to do it
    BuildMI(MBB, I, DL, get(ARM::tPUSH))
        .add(predOps(ARMCC::AL))
        .addReg(SrcReg, getKillRegState(KillSrc));
    BuildMI(MBB, I, DL, get(ARM::tPOP))
        .add(predOps(ARMCC::AL))
        .addReg(DestReg, getDefRegState(true));
  }
}

void Thumb1InstrInfo::storeRegToStackSlot(MachineBasicBlock &MBB,
                                          MachineBasicBlock::iterator I,
                                          Register SrcReg, bool isKill, int FI,
                                          const TargetRegisterClass *RC,
                                          const TargetRegisterInfo *TRI,
                                          Register VReg,
                                          MachineInstr::MIFlag Flags) const {
  assert((RC == &ARM::tGPRRegClass ||
          (SrcReg.isPhysical() && isARMLowRegister(SrcReg))) &&
         "Unknown regclass!");

  if (RC == &ARM::tGPRRegClass ||
      (SrcReg.isPhysical() && isARMLowRegister(SrcReg))) {
    DebugLoc DL;
    if (I != MBB.end()) DL = I->getDebugLoc();

    MachineFunction &MF = *MBB.getParent();
    MachineFrameInfo &MFI = MF.getFrameInfo();
    MachineMemOperand *MMO = MF.getMachineMemOperand(
        MachinePointerInfo::getFixedStack(MF, FI), MachineMemOperand::MOStore,
        MFI.getObjectSize(FI), MFI.getObjectAlign(FI));
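    // tSTRspi is an SP-relative store with an immediate offset; the frame
    // index operand is rewritten to a real SP offset when frame indices are
    // eliminated.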
    BuildMI(MBB, I, DL, get(ARM::tSTRspi))
        .addReg(SrcReg, getKillRegState(isKill))
        .addFrameIndex(FI)
        .addImm(0)
        .addMemOperand(MMO)
        .add(predOps(ARMCC::AL));
  }
}

void Thumb1InstrInfo::loadRegFromStackSlot(
    MachineBasicBlock &MBB, MachineBasicBlock::iterator I, Register DestReg,
    int FI, const TargetRegisterClass *RC, const TargetRegisterInfo *TRI,
    Register VReg, MachineInstr::MIFlag Flags) const {
  assert((RC->hasSuperClassEq(&ARM::tGPRRegClass) ||
          (DestReg.isPhysical() && isARMLowRegister(DestReg))) &&
         "Unknown regclass!");

  if (RC->hasSuperClassEq(&ARM::tGPRRegClass) ||
      (DestReg.isPhysical() && isARMLowRegister(DestReg))) {
    DebugLoc DL;
    if (I != MBB.end()) DL = I->getDebugLoc();

    MachineFunction &MF = *MBB.getParent();
    MachineFrameInfo &MFI = MF.getFrameInfo();
    MachineMemOperand *MMO = MF.getMachineMemOperand(
        MachinePointerInfo::getFixedStack(MF, FI), MachineMemOperand::MOLoad,
        MFI.getObjectSize(FI), MFI.getObjectAlign(FI));
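    // tLDRspi mirrors the store above: an SP-relative load whose frame index
    // is later rewritten to an immediate SP offset.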
    BuildMI(MBB, I, DL, get(ARM::tLDRspi), DestReg)
        .addFrameIndex(FI)
        .addImm(0)
        .addMemOperand(MMO)
        .add(predOps(ARMCC::AL));
  }
}

void Thumb1InstrInfo::expandLoadStackGuard(
    MachineBasicBlock::iterator MI) const {
  MachineFunction &MF = *MI->getParent()->getParent();
  const ARMSubtarget &ST = MF.getSubtarget<ARMSubtarget>();
  const auto *GV = cast<GlobalValue>((*MI->memoperands_begin())->getValue());

  assert(MF.getFunction().getParent()->getStackProtectorGuard() != "tls" &&
         "TLS stack protector not supported for Thumb1 targets");

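  // Pick how the guard variable's address is materialized: a PC-relative
  // literal load when the global is not DSO-local; for execute-only code,
  // build the address in registers (MOVW/MOVT on v8-M Baseline, otherwise the
  // Thumb-1 32-bit immediate pseudo) so no constant pool is needed; otherwise
  // load the absolute address from a literal pool.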
  unsigned Instr;
  if (!GV->isDSOLocal())
    Instr = ARM::tLDRLIT_ga_pcrel;
  else if (ST.genExecuteOnly() && ST.hasV8MBaselineOps())
    Instr = ARM::t2MOVi32imm;
  else if (ST.genExecuteOnly())
    Instr = ARM::tMOVi32imm;
  else
    Instr = ARM::tLDRLIT_ga_abs;
  expandLoadStackGuardBase(MI, Instr, ARM::tLDRi);
}

bool Thumb1InstrInfo::canCopyGluedNodeDuringSchedule(SDNode *N) const {
  // In Thumb1 the scheduler may need to schedule a cross-copy between GPRs and
  // CPSR but this is not always possible there, so allow the scheduler to
  // clone tADCS and tSBCS even if they have glue.
  // FIXME: Actually implement the cross-copy where it is possible (post v6),
  // because these copies entail more spilling.
  unsigned Opcode = N->getMachineOpcode();
  if (Opcode == ARM::tADCS || Opcode == ARM::tSBCS)
    return true;

  return false;
}